lfx-nightly 0.1.11.dev0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lfx/__init__.py +0 -0
- lfx/__main__.py +25 -0
- lfx/base/__init__.py +0 -0
- lfx/base/agents/__init__.py +0 -0
- lfx/base/agents/agent.py +268 -0
- lfx/base/agents/callback.py +130 -0
- lfx/base/agents/context.py +109 -0
- lfx/base/agents/crewai/__init__.py +0 -0
- lfx/base/agents/crewai/crew.py +231 -0
- lfx/base/agents/crewai/tasks.py +12 -0
- lfx/base/agents/default_prompts.py +23 -0
- lfx/base/agents/errors.py +15 -0
- lfx/base/agents/events.py +346 -0
- lfx/base/agents/utils.py +205 -0
- lfx/base/astra_assistants/__init__.py +0 -0
- lfx/base/astra_assistants/util.py +171 -0
- lfx/base/chains/__init__.py +0 -0
- lfx/base/chains/model.py +19 -0
- lfx/base/composio/__init__.py +0 -0
- lfx/base/composio/composio_base.py +1291 -0
- lfx/base/compressors/__init__.py +0 -0
- lfx/base/compressors/model.py +60 -0
- lfx/base/constants.py +46 -0
- lfx/base/curl/__init__.py +0 -0
- lfx/base/curl/parse.py +188 -0
- lfx/base/data/__init__.py +5 -0
- lfx/base/data/base_file.py +685 -0
- lfx/base/data/docling_utils.py +245 -0
- lfx/base/data/utils.py +198 -0
- lfx/base/document_transformers/__init__.py +0 -0
- lfx/base/document_transformers/model.py +43 -0
- lfx/base/embeddings/__init__.py +0 -0
- lfx/base/embeddings/aiml_embeddings.py +62 -0
- lfx/base/embeddings/model.py +26 -0
- lfx/base/flow_processing/__init__.py +0 -0
- lfx/base/flow_processing/utils.py +86 -0
- lfx/base/huggingface/__init__.py +0 -0
- lfx/base/huggingface/model_bridge.py +133 -0
- lfx/base/io/__init__.py +0 -0
- lfx/base/io/chat.py +20 -0
- lfx/base/io/text.py +22 -0
- lfx/base/langchain_utilities/__init__.py +0 -0
- lfx/base/langchain_utilities/model.py +35 -0
- lfx/base/langchain_utilities/spider_constants.py +1 -0
- lfx/base/langwatch/__init__.py +0 -0
- lfx/base/langwatch/utils.py +18 -0
- lfx/base/mcp/__init__.py +0 -0
- lfx/base/mcp/constants.py +2 -0
- lfx/base/mcp/util.py +1398 -0
- lfx/base/memory/__init__.py +0 -0
- lfx/base/memory/memory.py +49 -0
- lfx/base/memory/model.py +38 -0
- lfx/base/models/__init__.py +3 -0
- lfx/base/models/aiml_constants.py +51 -0
- lfx/base/models/anthropic_constants.py +47 -0
- lfx/base/models/aws_constants.py +151 -0
- lfx/base/models/chat_result.py +76 -0
- lfx/base/models/google_generative_ai_constants.py +70 -0
- lfx/base/models/groq_constants.py +134 -0
- lfx/base/models/model.py +375 -0
- lfx/base/models/model_input_constants.py +307 -0
- lfx/base/models/model_metadata.py +41 -0
- lfx/base/models/model_utils.py +8 -0
- lfx/base/models/novita_constants.py +35 -0
- lfx/base/models/ollama_constants.py +49 -0
- lfx/base/models/openai_constants.py +122 -0
- lfx/base/models/sambanova_constants.py +18 -0
- lfx/base/processing/__init__.py +0 -0
- lfx/base/prompts/__init__.py +0 -0
- lfx/base/prompts/api_utils.py +224 -0
- lfx/base/prompts/utils.py +61 -0
- lfx/base/textsplitters/__init__.py +0 -0
- lfx/base/textsplitters/model.py +28 -0
- lfx/base/tools/__init__.py +0 -0
- lfx/base/tools/base.py +26 -0
- lfx/base/tools/component_tool.py +325 -0
- lfx/base/tools/constants.py +49 -0
- lfx/base/tools/flow_tool.py +132 -0
- lfx/base/tools/run_flow.py +224 -0
- lfx/base/vectorstores/__init__.py +0 -0
- lfx/base/vectorstores/model.py +193 -0
- lfx/base/vectorstores/utils.py +22 -0
- lfx/base/vectorstores/vector_store_connection_decorator.py +52 -0
- lfx/cli/__init__.py +5 -0
- lfx/cli/commands.py +319 -0
- lfx/cli/common.py +650 -0
- lfx/cli/run.py +441 -0
- lfx/cli/script_loader.py +247 -0
- lfx/cli/serve_app.py +546 -0
- lfx/cli/validation.py +69 -0
- lfx/components/FAISS/__init__.py +34 -0
- lfx/components/FAISS/faiss.py +111 -0
- lfx/components/Notion/__init__.py +19 -0
- lfx/components/Notion/add_content_to_page.py +269 -0
- lfx/components/Notion/create_page.py +94 -0
- lfx/components/Notion/list_database_properties.py +68 -0
- lfx/components/Notion/list_pages.py +122 -0
- lfx/components/Notion/list_users.py +77 -0
- lfx/components/Notion/page_content_viewer.py +93 -0
- lfx/components/Notion/search.py +111 -0
- lfx/components/Notion/update_page_property.py +114 -0
- lfx/components/__init__.py +411 -0
- lfx/components/_importing.py +42 -0
- lfx/components/agentql/__init__.py +3 -0
- lfx/components/agentql/agentql_api.py +151 -0
- lfx/components/agents/__init__.py +34 -0
- lfx/components/agents/agent.py +558 -0
- lfx/components/agents/mcp_component.py +501 -0
- lfx/components/aiml/__init__.py +37 -0
- lfx/components/aiml/aiml.py +112 -0
- lfx/components/aiml/aiml_embeddings.py +37 -0
- lfx/components/amazon/__init__.py +36 -0
- lfx/components/amazon/amazon_bedrock_embedding.py +109 -0
- lfx/components/amazon/amazon_bedrock_model.py +124 -0
- lfx/components/amazon/s3_bucket_uploader.py +211 -0
- lfx/components/anthropic/__init__.py +34 -0
- lfx/components/anthropic/anthropic.py +187 -0
- lfx/components/apify/__init__.py +5 -0
- lfx/components/apify/apify_actor.py +325 -0
- lfx/components/arxiv/__init__.py +3 -0
- lfx/components/arxiv/arxiv.py +163 -0
- lfx/components/assemblyai/__init__.py +46 -0
- lfx/components/assemblyai/assemblyai_get_subtitles.py +83 -0
- lfx/components/assemblyai/assemblyai_lemur.py +183 -0
- lfx/components/assemblyai/assemblyai_list_transcripts.py +95 -0
- lfx/components/assemblyai/assemblyai_poll_transcript.py +72 -0
- lfx/components/assemblyai/assemblyai_start_transcript.py +188 -0
- lfx/components/azure/__init__.py +37 -0
- lfx/components/azure/azure_openai.py +95 -0
- lfx/components/azure/azure_openai_embeddings.py +83 -0
- lfx/components/baidu/__init__.py +32 -0
- lfx/components/baidu/baidu_qianfan_chat.py +113 -0
- lfx/components/bing/__init__.py +3 -0
- lfx/components/bing/bing_search_api.py +61 -0
- lfx/components/cassandra/__init__.py +40 -0
- lfx/components/cassandra/cassandra.py +264 -0
- lfx/components/cassandra/cassandra_chat.py +92 -0
- lfx/components/cassandra/cassandra_graph.py +238 -0
- lfx/components/chains/__init__.py +3 -0
- lfx/components/chroma/__init__.py +34 -0
- lfx/components/chroma/chroma.py +167 -0
- lfx/components/cleanlab/__init__.py +40 -0
- lfx/components/cleanlab/cleanlab_evaluator.py +155 -0
- lfx/components/cleanlab/cleanlab_rag_evaluator.py +254 -0
- lfx/components/cleanlab/cleanlab_remediator.py +131 -0
- lfx/components/clickhouse/__init__.py +34 -0
- lfx/components/clickhouse/clickhouse.py +135 -0
- lfx/components/cloudflare/__init__.py +32 -0
- lfx/components/cloudflare/cloudflare.py +81 -0
- lfx/components/cohere/__init__.py +40 -0
- lfx/components/cohere/cohere_embeddings.py +81 -0
- lfx/components/cohere/cohere_models.py +46 -0
- lfx/components/cohere/cohere_rerank.py +51 -0
- lfx/components/composio/__init__.py +74 -0
- lfx/components/composio/composio_api.py +268 -0
- lfx/components/composio/dropbox_compnent.py +11 -0
- lfx/components/composio/github_composio.py +11 -0
- lfx/components/composio/gmail_composio.py +38 -0
- lfx/components/composio/googlecalendar_composio.py +11 -0
- lfx/components/composio/googlemeet_composio.py +11 -0
- lfx/components/composio/googletasks_composio.py +8 -0
- lfx/components/composio/linear_composio.py +11 -0
- lfx/components/composio/outlook_composio.py +11 -0
- lfx/components/composio/reddit_composio.py +11 -0
- lfx/components/composio/slack_composio.py +582 -0
- lfx/components/composio/slackbot_composio.py +11 -0
- lfx/components/composio/supabase_composio.py +11 -0
- lfx/components/composio/todoist_composio.py +11 -0
- lfx/components/composio/youtube_composio.py +11 -0
- lfx/components/confluence/__init__.py +3 -0
- lfx/components/confluence/confluence.py +84 -0
- lfx/components/couchbase/__init__.py +34 -0
- lfx/components/couchbase/couchbase.py +102 -0
- lfx/components/crewai/__init__.py +49 -0
- lfx/components/crewai/crewai.py +107 -0
- lfx/components/crewai/hierarchical_crew.py +46 -0
- lfx/components/crewai/hierarchical_task.py +44 -0
- lfx/components/crewai/sequential_crew.py +52 -0
- lfx/components/crewai/sequential_task.py +73 -0
- lfx/components/crewai/sequential_task_agent.py +143 -0
- lfx/components/custom_component/__init__.py +34 -0
- lfx/components/custom_component/custom_component.py +31 -0
- lfx/components/data/__init__.py +64 -0
- lfx/components/data/api_request.py +544 -0
- lfx/components/data/csv_to_data.py +95 -0
- lfx/components/data/directory.py +113 -0
- lfx/components/data/file.py +577 -0
- lfx/components/data/json_to_data.py +98 -0
- lfx/components/data/news_search.py +164 -0
- lfx/components/data/rss.py +69 -0
- lfx/components/data/sql_executor.py +101 -0
- lfx/components/data/url.py +311 -0
- lfx/components/data/web_search.py +112 -0
- lfx/components/data/webhook.py +56 -0
- lfx/components/datastax/__init__.py +70 -0
- lfx/components/datastax/astra_assistant_manager.py +306 -0
- lfx/components/datastax/astra_db.py +75 -0
- lfx/components/datastax/astra_vectorize.py +124 -0
- lfx/components/datastax/astradb.py +1285 -0
- lfx/components/datastax/astradb_cql.py +314 -0
- lfx/components/datastax/astradb_graph.py +330 -0
- lfx/components/datastax/astradb_tool.py +414 -0
- lfx/components/datastax/astradb_vectorstore.py +1285 -0
- lfx/components/datastax/cassandra.py +92 -0
- lfx/components/datastax/create_assistant.py +58 -0
- lfx/components/datastax/create_thread.py +32 -0
- lfx/components/datastax/dotenv.py +35 -0
- lfx/components/datastax/get_assistant.py +37 -0
- lfx/components/datastax/getenvvar.py +30 -0
- lfx/components/datastax/graph_rag.py +141 -0
- lfx/components/datastax/hcd.py +314 -0
- lfx/components/datastax/list_assistants.py +25 -0
- lfx/components/datastax/run.py +89 -0
- lfx/components/deactivated/__init__.py +15 -0
- lfx/components/deactivated/amazon_kendra.py +66 -0
- lfx/components/deactivated/chat_litellm_model.py +158 -0
- lfx/components/deactivated/code_block_extractor.py +26 -0
- lfx/components/deactivated/documents_to_data.py +22 -0
- lfx/components/deactivated/embed.py +16 -0
- lfx/components/deactivated/extract_key_from_data.py +46 -0
- lfx/components/deactivated/json_document_builder.py +57 -0
- lfx/components/deactivated/list_flows.py +20 -0
- lfx/components/deactivated/mcp_sse.py +61 -0
- lfx/components/deactivated/mcp_stdio.py +62 -0
- lfx/components/deactivated/merge_data.py +93 -0
- lfx/components/deactivated/message.py +37 -0
- lfx/components/deactivated/metal.py +54 -0
- lfx/components/deactivated/multi_query.py +59 -0
- lfx/components/deactivated/retriever.py +43 -0
- lfx/components/deactivated/selective_passthrough.py +77 -0
- lfx/components/deactivated/should_run_next.py +40 -0
- lfx/components/deactivated/split_text.py +63 -0
- lfx/components/deactivated/store_message.py +24 -0
- lfx/components/deactivated/sub_flow.py +124 -0
- lfx/components/deactivated/vectara_self_query.py +76 -0
- lfx/components/deactivated/vector_store.py +24 -0
- lfx/components/deepseek/__init__.py +34 -0
- lfx/components/deepseek/deepseek.py +136 -0
- lfx/components/docling/__init__.py +43 -0
- lfx/components/docling/chunk_docling_document.py +186 -0
- lfx/components/docling/docling_inline.py +231 -0
- lfx/components/docling/docling_remote.py +193 -0
- lfx/components/docling/export_docling_document.py +117 -0
- lfx/components/documentloaders/__init__.py +3 -0
- lfx/components/duckduckgo/__init__.py +3 -0
- lfx/components/duckduckgo/duck_duck_go_search_run.py +92 -0
- lfx/components/elastic/__init__.py +37 -0
- lfx/components/elastic/elasticsearch.py +267 -0
- lfx/components/elastic/opensearch.py +243 -0
- lfx/components/embeddings/__init__.py +37 -0
- lfx/components/embeddings/similarity.py +76 -0
- lfx/components/embeddings/text_embedder.py +64 -0
- lfx/components/exa/__init__.py +3 -0
- lfx/components/exa/exa_search.py +68 -0
- lfx/components/firecrawl/__init__.py +43 -0
- lfx/components/firecrawl/firecrawl_crawl_api.py +88 -0
- lfx/components/firecrawl/firecrawl_extract_api.py +136 -0
- lfx/components/firecrawl/firecrawl_map_api.py +89 -0
- lfx/components/firecrawl/firecrawl_scrape_api.py +73 -0
- lfx/components/git/__init__.py +4 -0
- lfx/components/git/git.py +262 -0
- lfx/components/git/gitextractor.py +196 -0
- lfx/components/glean/__init__.py +3 -0
- lfx/components/glean/glean_search_api.py +173 -0
- lfx/components/google/__init__.py +17 -0
- lfx/components/google/gmail.py +192 -0
- lfx/components/google/google_bq_sql_executor.py +157 -0
- lfx/components/google/google_drive.py +92 -0
- lfx/components/google/google_drive_search.py +152 -0
- lfx/components/google/google_generative_ai.py +147 -0
- lfx/components/google/google_generative_ai_embeddings.py +141 -0
- lfx/components/google/google_oauth_token.py +89 -0
- lfx/components/google/google_search_api_core.py +68 -0
- lfx/components/google/google_serper_api_core.py +74 -0
- lfx/components/groq/__init__.py +34 -0
- lfx/components/groq/groq.py +136 -0
- lfx/components/helpers/__init__.py +52 -0
- lfx/components/helpers/calculator_core.py +89 -0
- lfx/components/helpers/create_list.py +40 -0
- lfx/components/helpers/current_date.py +42 -0
- lfx/components/helpers/id_generator.py +42 -0
- lfx/components/helpers/memory.py +251 -0
- lfx/components/helpers/output_parser.py +45 -0
- lfx/components/helpers/store_message.py +90 -0
- lfx/components/homeassistant/__init__.py +7 -0
- lfx/components/homeassistant/home_assistant_control.py +152 -0
- lfx/components/homeassistant/list_home_assistant_states.py +137 -0
- lfx/components/huggingface/__init__.py +37 -0
- lfx/components/huggingface/huggingface.py +197 -0
- lfx/components/huggingface/huggingface_inference_api.py +106 -0
- lfx/components/ibm/__init__.py +34 -0
- lfx/components/ibm/watsonx.py +203 -0
- lfx/components/ibm/watsonx_embeddings.py +135 -0
- lfx/components/icosacomputing/__init__.py +5 -0
- lfx/components/icosacomputing/combinatorial_reasoner.py +84 -0
- lfx/components/input_output/__init__.py +38 -0
- lfx/components/input_output/chat.py +120 -0
- lfx/components/input_output/chat_output.py +200 -0
- lfx/components/input_output/text.py +27 -0
- lfx/components/input_output/text_output.py +29 -0
- lfx/components/jigsawstack/__init__.py +23 -0
- lfx/components/jigsawstack/ai_scrape.py +126 -0
- lfx/components/jigsawstack/ai_web_search.py +136 -0
- lfx/components/jigsawstack/file_read.py +115 -0
- lfx/components/jigsawstack/file_upload.py +94 -0
- lfx/components/jigsawstack/image_generation.py +205 -0
- lfx/components/jigsawstack/nsfw.py +60 -0
- lfx/components/jigsawstack/object_detection.py +124 -0
- lfx/components/jigsawstack/sentiment.py +112 -0
- lfx/components/jigsawstack/text_to_sql.py +90 -0
- lfx/components/jigsawstack/text_translate.py +77 -0
- lfx/components/jigsawstack/vocr.py +107 -0
- lfx/components/langchain_utilities/__init__.py +109 -0
- lfx/components/langchain_utilities/character.py +53 -0
- lfx/components/langchain_utilities/conversation.py +59 -0
- lfx/components/langchain_utilities/csv_agent.py +107 -0
- lfx/components/langchain_utilities/fake_embeddings.py +26 -0
- lfx/components/langchain_utilities/html_link_extractor.py +35 -0
- lfx/components/langchain_utilities/json_agent.py +45 -0
- lfx/components/langchain_utilities/langchain_hub.py +126 -0
- lfx/components/langchain_utilities/language_recursive.py +49 -0
- lfx/components/langchain_utilities/language_semantic.py +138 -0
- lfx/components/langchain_utilities/llm_checker.py +39 -0
- lfx/components/langchain_utilities/llm_math.py +42 -0
- lfx/components/langchain_utilities/natural_language.py +61 -0
- lfx/components/langchain_utilities/openai_tools.py +53 -0
- lfx/components/langchain_utilities/openapi.py +48 -0
- lfx/components/langchain_utilities/recursive_character.py +60 -0
- lfx/components/langchain_utilities/retrieval_qa.py +83 -0
- lfx/components/langchain_utilities/runnable_executor.py +137 -0
- lfx/components/langchain_utilities/self_query.py +80 -0
- lfx/components/langchain_utilities/spider.py +142 -0
- lfx/components/langchain_utilities/sql.py +40 -0
- lfx/components/langchain_utilities/sql_database.py +35 -0
- lfx/components/langchain_utilities/sql_generator.py +78 -0
- lfx/components/langchain_utilities/tool_calling.py +59 -0
- lfx/components/langchain_utilities/vector_store_info.py +49 -0
- lfx/components/langchain_utilities/vector_store_router.py +33 -0
- lfx/components/langchain_utilities/xml_agent.py +71 -0
- lfx/components/langwatch/__init__.py +3 -0
- lfx/components/langwatch/langwatch.py +278 -0
- lfx/components/link_extractors/__init__.py +3 -0
- lfx/components/lmstudio/__init__.py +34 -0
- lfx/components/lmstudio/lmstudioembeddings.py +89 -0
- lfx/components/lmstudio/lmstudiomodel.py +129 -0
- lfx/components/logic/__init__.py +52 -0
- lfx/components/logic/conditional_router.py +171 -0
- lfx/components/logic/data_conditional_router.py +125 -0
- lfx/components/logic/flow_tool.py +110 -0
- lfx/components/logic/listen.py +29 -0
- lfx/components/logic/loop.py +125 -0
- lfx/components/logic/notify.py +88 -0
- lfx/components/logic/pass_message.py +35 -0
- lfx/components/logic/run_flow.py +71 -0
- lfx/components/logic/sub_flow.py +114 -0
- lfx/components/maritalk/__init__.py +32 -0
- lfx/components/maritalk/maritalk.py +52 -0
- lfx/components/mem0/__init__.py +3 -0
- lfx/components/mem0/mem0_chat_memory.py +136 -0
- lfx/components/milvus/__init__.py +34 -0
- lfx/components/milvus/milvus.py +115 -0
- lfx/components/mistral/__init__.py +37 -0
- lfx/components/mistral/mistral.py +114 -0
- lfx/components/mistral/mistral_embeddings.py +58 -0
- lfx/components/models/__init__.py +34 -0
- lfx/components/models/embedding_model.py +114 -0
- lfx/components/models/language_model.py +144 -0
- lfx/components/mongodb/__init__.py +34 -0
- lfx/components/mongodb/mongodb_atlas.py +213 -0
- lfx/components/needle/__init__.py +3 -0
- lfx/components/needle/needle.py +104 -0
- lfx/components/notdiamond/__init__.py +34 -0
- lfx/components/notdiamond/notdiamond.py +228 -0
- lfx/components/novita/__init__.py +32 -0
- lfx/components/novita/novita.py +130 -0
- lfx/components/nvidia/__init__.py +57 -0
- lfx/components/nvidia/nvidia.py +157 -0
- lfx/components/nvidia/nvidia_embedding.py +77 -0
- lfx/components/nvidia/nvidia_ingest.py +317 -0
- lfx/components/nvidia/nvidia_rerank.py +63 -0
- lfx/components/nvidia/system_assist.py +65 -0
- lfx/components/olivya/__init__.py +3 -0
- lfx/components/olivya/olivya.py +116 -0
- lfx/components/ollama/__init__.py +37 -0
- lfx/components/ollama/ollama.py +330 -0
- lfx/components/ollama/ollama_embeddings.py +106 -0
- lfx/components/openai/__init__.py +37 -0
- lfx/components/openai/openai.py +100 -0
- lfx/components/openai/openai_chat_model.py +176 -0
- lfx/components/openrouter/__init__.py +32 -0
- lfx/components/openrouter/openrouter.py +202 -0
- lfx/components/output_parsers/__init__.py +3 -0
- lfx/components/perplexity/__init__.py +34 -0
- lfx/components/perplexity/perplexity.py +75 -0
- lfx/components/pgvector/__init__.py +34 -0
- lfx/components/pgvector/pgvector.py +72 -0
- lfx/components/pinecone/__init__.py +34 -0
- lfx/components/pinecone/pinecone.py +134 -0
- lfx/components/processing/__init__.py +117 -0
- lfx/components/processing/alter_metadata.py +108 -0
- lfx/components/processing/batch_run.py +205 -0
- lfx/components/processing/combine_text.py +39 -0
- lfx/components/processing/converter.py +159 -0
- lfx/components/processing/create_data.py +110 -0
- lfx/components/processing/data_operations.py +438 -0
- lfx/components/processing/data_to_dataframe.py +70 -0
- lfx/components/processing/dataframe_operations.py +313 -0
- lfx/components/processing/extract_key.py +53 -0
- lfx/components/processing/filter_data.py +42 -0
- lfx/components/processing/filter_data_values.py +88 -0
- lfx/components/processing/json_cleaner.py +103 -0
- lfx/components/processing/lambda_filter.py +154 -0
- lfx/components/processing/llm_router.py +499 -0
- lfx/components/processing/merge_data.py +90 -0
- lfx/components/processing/message_to_data.py +36 -0
- lfx/components/processing/parse_data.py +70 -0
- lfx/components/processing/parse_dataframe.py +68 -0
- lfx/components/processing/parse_json_data.py +90 -0
- lfx/components/processing/parser.py +143 -0
- lfx/components/processing/prompt.py +67 -0
- lfx/components/processing/python_repl_core.py +98 -0
- lfx/components/processing/regex.py +82 -0
- lfx/components/processing/save_file.py +225 -0
- lfx/components/processing/select_data.py +48 -0
- lfx/components/processing/split_text.py +141 -0
- lfx/components/processing/structured_output.py +202 -0
- lfx/components/processing/update_data.py +160 -0
- lfx/components/prototypes/__init__.py +34 -0
- lfx/components/prototypes/python_function.py +73 -0
- lfx/components/qdrant/__init__.py +34 -0
- lfx/components/qdrant/qdrant.py +109 -0
- lfx/components/redis/__init__.py +37 -0
- lfx/components/redis/redis.py +89 -0
- lfx/components/redis/redis_chat.py +43 -0
- lfx/components/sambanova/__init__.py +32 -0
- lfx/components/sambanova/sambanova.py +84 -0
- lfx/components/scrapegraph/__init__.py +40 -0
- lfx/components/scrapegraph/scrapegraph_markdownify_api.py +64 -0
- lfx/components/scrapegraph/scrapegraph_search_api.py +64 -0
- lfx/components/scrapegraph/scrapegraph_smart_scraper_api.py +71 -0
- lfx/components/searchapi/__init__.py +34 -0
- lfx/components/searchapi/search.py +79 -0
- lfx/components/serpapi/__init__.py +3 -0
- lfx/components/serpapi/serp.py +115 -0
- lfx/components/supabase/__init__.py +34 -0
- lfx/components/supabase/supabase.py +76 -0
- lfx/components/tavily/__init__.py +4 -0
- lfx/components/tavily/tavily_extract.py +117 -0
- lfx/components/tavily/tavily_search.py +212 -0
- lfx/components/textsplitters/__init__.py +3 -0
- lfx/components/toolkits/__init__.py +3 -0
- lfx/components/tools/__init__.py +72 -0
- lfx/components/tools/calculator.py +108 -0
- lfx/components/tools/google_search_api.py +45 -0
- lfx/components/tools/google_serper_api.py +115 -0
- lfx/components/tools/python_code_structured_tool.py +327 -0
- lfx/components/tools/python_repl.py +97 -0
- lfx/components/tools/search_api.py +87 -0
- lfx/components/tools/searxng.py +145 -0
- lfx/components/tools/serp_api.py +119 -0
- lfx/components/tools/tavily_search_tool.py +344 -0
- lfx/components/tools/wikidata_api.py +102 -0
- lfx/components/tools/wikipedia_api.py +49 -0
- lfx/components/tools/yahoo_finance.py +129 -0
- lfx/components/twelvelabs/__init__.py +52 -0
- lfx/components/twelvelabs/convert_astra_results.py +84 -0
- lfx/components/twelvelabs/pegasus_index.py +311 -0
- lfx/components/twelvelabs/split_video.py +291 -0
- lfx/components/twelvelabs/text_embeddings.py +57 -0
- lfx/components/twelvelabs/twelvelabs_pegasus.py +408 -0
- lfx/components/twelvelabs/video_embeddings.py +100 -0
- lfx/components/twelvelabs/video_file.py +179 -0
- lfx/components/unstructured/__init__.py +3 -0
- lfx/components/unstructured/unstructured.py +121 -0
- lfx/components/upstash/__init__.py +34 -0
- lfx/components/upstash/upstash.py +124 -0
- lfx/components/vectara/__init__.py +37 -0
- lfx/components/vectara/vectara.py +97 -0
- lfx/components/vectara/vectara_rag.py +164 -0
- lfx/components/vectorstores/__init__.py +40 -0
- lfx/components/vectorstores/astradb.py +1285 -0
- lfx/components/vectorstores/astradb_graph.py +319 -0
- lfx/components/vectorstores/cassandra.py +264 -0
- lfx/components/vectorstores/cassandra_graph.py +238 -0
- lfx/components/vectorstores/chroma.py +167 -0
- lfx/components/vectorstores/clickhouse.py +135 -0
- lfx/components/vectorstores/couchbase.py +102 -0
- lfx/components/vectorstores/elasticsearch.py +267 -0
- lfx/components/vectorstores/faiss.py +111 -0
- lfx/components/vectorstores/graph_rag.py +141 -0
- lfx/components/vectorstores/hcd.py +314 -0
- lfx/components/vectorstores/local_db.py +261 -0
- lfx/components/vectorstores/milvus.py +115 -0
- lfx/components/vectorstores/mongodb_atlas.py +213 -0
- lfx/components/vectorstores/opensearch.py +243 -0
- lfx/components/vectorstores/pgvector.py +72 -0
- lfx/components/vectorstores/pinecone.py +134 -0
- lfx/components/vectorstores/qdrant.py +109 -0
- lfx/components/vectorstores/supabase.py +76 -0
- lfx/components/vectorstores/upstash.py +124 -0
- lfx/components/vectorstores/vectara.py +97 -0
- lfx/components/vectorstores/vectara_rag.py +164 -0
- lfx/components/vectorstores/weaviate.py +89 -0
- lfx/components/vertexai/__init__.py +37 -0
- lfx/components/vertexai/vertexai.py +71 -0
- lfx/components/vertexai/vertexai_embeddings.py +67 -0
- lfx/components/weaviate/__init__.py +34 -0
- lfx/components/weaviate/weaviate.py +89 -0
- lfx/components/wikipedia/__init__.py +4 -0
- lfx/components/wikipedia/wikidata.py +86 -0
- lfx/components/wikipedia/wikipedia.py +53 -0
- lfx/components/wolframalpha/__init__.py +3 -0
- lfx/components/wolframalpha/wolfram_alpha_api.py +54 -0
- lfx/components/xai/__init__.py +32 -0
- lfx/components/xai/xai.py +167 -0
- lfx/components/yahoosearch/__init__.py +3 -0
- lfx/components/yahoosearch/yahoo.py +137 -0
- lfx/components/youtube/__init__.py +52 -0
- lfx/components/youtube/channel.py +227 -0
- lfx/components/youtube/comments.py +231 -0
- lfx/components/youtube/playlist.py +33 -0
- lfx/components/youtube/search.py +120 -0
- lfx/components/youtube/trending.py +285 -0
- lfx/components/youtube/video_details.py +263 -0
- lfx/components/youtube/youtube_transcripts.py +118 -0
- lfx/components/zep/__init__.py +3 -0
- lfx/components/zep/zep.py +44 -0
- lfx/constants.py +6 -0
- lfx/custom/__init__.py +7 -0
- lfx/custom/attributes.py +86 -0
- lfx/custom/code_parser/__init__.py +3 -0
- lfx/custom/code_parser/code_parser.py +361 -0
- lfx/custom/custom_component/__init__.py +0 -0
- lfx/custom/custom_component/base_component.py +128 -0
- lfx/custom/custom_component/component.py +1808 -0
- lfx/custom/custom_component/component_with_cache.py +8 -0
- lfx/custom/custom_component/custom_component.py +588 -0
- lfx/custom/dependency_analyzer.py +165 -0
- lfx/custom/directory_reader/__init__.py +3 -0
- lfx/custom/directory_reader/directory_reader.py +359 -0
- lfx/custom/directory_reader/utils.py +171 -0
- lfx/custom/eval.py +12 -0
- lfx/custom/schema.py +32 -0
- lfx/custom/tree_visitor.py +21 -0
- lfx/custom/utils.py +877 -0
- lfx/custom/validate.py +488 -0
- lfx/events/__init__.py +1 -0
- lfx/events/event_manager.py +110 -0
- lfx/exceptions/__init__.py +0 -0
- lfx/exceptions/component.py +15 -0
- lfx/field_typing/__init__.py +91 -0
- lfx/field_typing/constants.py +215 -0
- lfx/field_typing/range_spec.py +35 -0
- lfx/graph/__init__.py +6 -0
- lfx/graph/edge/__init__.py +0 -0
- lfx/graph/edge/base.py +277 -0
- lfx/graph/edge/schema.py +119 -0
- lfx/graph/edge/utils.py +0 -0
- lfx/graph/graph/__init__.py +0 -0
- lfx/graph/graph/ascii.py +202 -0
- lfx/graph/graph/base.py +2238 -0
- lfx/graph/graph/constants.py +63 -0
- lfx/graph/graph/runnable_vertices_manager.py +133 -0
- lfx/graph/graph/schema.py +52 -0
- lfx/graph/graph/state_model.py +66 -0
- lfx/graph/graph/utils.py +1024 -0
- lfx/graph/schema.py +75 -0
- lfx/graph/state/__init__.py +0 -0
- lfx/graph/state/model.py +237 -0
- lfx/graph/utils.py +200 -0
- lfx/graph/vertex/__init__.py +0 -0
- lfx/graph/vertex/base.py +823 -0
- lfx/graph/vertex/constants.py +0 -0
- lfx/graph/vertex/exceptions.py +4 -0
- lfx/graph/vertex/param_handler.py +264 -0
- lfx/graph/vertex/schema.py +26 -0
- lfx/graph/vertex/utils.py +19 -0
- lfx/graph/vertex/vertex_types.py +489 -0
- lfx/helpers/__init__.py +1 -0
- lfx/helpers/base_model.py +71 -0
- lfx/helpers/custom.py +13 -0
- lfx/helpers/data.py +167 -0
- lfx/helpers/flow.py +194 -0
- lfx/inputs/__init__.py +68 -0
- lfx/inputs/constants.py +2 -0
- lfx/inputs/input_mixin.py +328 -0
- lfx/inputs/inputs.py +714 -0
- lfx/inputs/validators.py +19 -0
- lfx/interface/__init__.py +6 -0
- lfx/interface/components.py +489 -0
- lfx/interface/importing/__init__.py +5 -0
- lfx/interface/importing/utils.py +39 -0
- lfx/interface/initialize/__init__.py +3 -0
- lfx/interface/initialize/loading.py +224 -0
- lfx/interface/listing.py +26 -0
- lfx/interface/run.py +16 -0
- lfx/interface/utils.py +111 -0
- lfx/io/__init__.py +63 -0
- lfx/io/schema.py +289 -0
- lfx/load/__init__.py +8 -0
- lfx/load/load.py +256 -0
- lfx/load/utils.py +99 -0
- lfx/log/__init__.py +5 -0
- lfx/log/logger.py +385 -0
- lfx/memory/__init__.py +90 -0
- lfx/memory/stubs.py +283 -0
- lfx/processing/__init__.py +1 -0
- lfx/processing/process.py +238 -0
- lfx/processing/utils.py +25 -0
- lfx/py.typed +0 -0
- lfx/schema/__init__.py +66 -0
- lfx/schema/artifact.py +83 -0
- lfx/schema/content_block.py +62 -0
- lfx/schema/content_types.py +91 -0
- lfx/schema/data.py +308 -0
- lfx/schema/dataframe.py +210 -0
- lfx/schema/dotdict.py +74 -0
- lfx/schema/encoders.py +13 -0
- lfx/schema/graph.py +47 -0
- lfx/schema/image.py +131 -0
- lfx/schema/json_schema.py +141 -0
- lfx/schema/log.py +61 -0
- lfx/schema/message.py +473 -0
- lfx/schema/openai_responses_schemas.py +74 -0
- lfx/schema/properties.py +41 -0
- lfx/schema/schema.py +171 -0
- lfx/schema/serialize.py +13 -0
- lfx/schema/table.py +140 -0
- lfx/schema/validators.py +114 -0
- lfx/serialization/__init__.py +5 -0
- lfx/serialization/constants.py +2 -0
- lfx/serialization/serialization.py +314 -0
- lfx/services/__init__.py +23 -0
- lfx/services/base.py +28 -0
- lfx/services/cache/__init__.py +6 -0
- lfx/services/cache/base.py +183 -0
- lfx/services/cache/service.py +166 -0
- lfx/services/cache/utils.py +169 -0
- lfx/services/chat/__init__.py +1 -0
- lfx/services/chat/config.py +2 -0
- lfx/services/chat/schema.py +10 -0
- lfx/services/deps.py +129 -0
- lfx/services/factory.py +19 -0
- lfx/services/initialize.py +19 -0
- lfx/services/interfaces.py +103 -0
- lfx/services/manager.py +172 -0
- lfx/services/schema.py +20 -0
- lfx/services/session.py +82 -0
- lfx/services/settings/__init__.py +3 -0
- lfx/services/settings/auth.py +130 -0
- lfx/services/settings/base.py +539 -0
- lfx/services/settings/constants.py +31 -0
- lfx/services/settings/factory.py +23 -0
- lfx/services/settings/feature_flags.py +12 -0
- lfx/services/settings/service.py +35 -0
- lfx/services/settings/utils.py +40 -0
- lfx/services/shared_component_cache/__init__.py +1 -0
- lfx/services/shared_component_cache/factory.py +30 -0
- lfx/services/shared_component_cache/service.py +9 -0
- lfx/services/storage/__init__.py +5 -0
- lfx/services/storage/local.py +155 -0
- lfx/services/storage/service.py +54 -0
- lfx/services/tracing/__init__.py +1 -0
- lfx/services/tracing/service.py +21 -0
- lfx/settings.py +6 -0
- lfx/template/__init__.py +6 -0
- lfx/template/field/__init__.py +0 -0
- lfx/template/field/base.py +257 -0
- lfx/template/field/prompt.py +15 -0
- lfx/template/frontend_node/__init__.py +6 -0
- lfx/template/frontend_node/base.py +212 -0
- lfx/template/frontend_node/constants.py +65 -0
- lfx/template/frontend_node/custom_components.py +79 -0
- lfx/template/template/__init__.py +0 -0
- lfx/template/template/base.py +100 -0
- lfx/template/utils.py +217 -0
- lfx/type_extraction/__init__.py +19 -0
- lfx/type_extraction/type_extraction.py +75 -0
- lfx/type_extraction.py +80 -0
- lfx/utils/__init__.py +1 -0
- lfx/utils/async_helpers.py +42 -0
- lfx/utils/component_utils.py +154 -0
- lfx/utils/concurrency.py +60 -0
- lfx/utils/connection_string_parser.py +11 -0
- lfx/utils/constants.py +205 -0
- lfx/utils/data_structure.py +212 -0
- lfx/utils/exceptions.py +22 -0
- lfx/utils/helpers.py +28 -0
- lfx/utils/image.py +73 -0
- lfx/utils/lazy_load.py +15 -0
- lfx/utils/request_utils.py +18 -0
- lfx/utils/schemas.py +139 -0
- lfx/utils/util.py +481 -0
- lfx/utils/util_strings.py +56 -0
- lfx/utils/version.py +24 -0
- lfx_nightly-0.1.11.dev0.dist-info/METADATA +293 -0
- lfx_nightly-0.1.11.dev0.dist-info/RECORD +699 -0
- lfx_nightly-0.1.11.dev0.dist-info/WHEEL +4 -0
- lfx_nightly-0.1.11.dev0.dist-info/entry_points.txt +2 -0
lfx/graph/graph/utils.py
ADDED
@@ -0,0 +1,1024 @@
|
|
1
|
+
import copy
|
2
|
+
from collections import defaultdict, deque
|
3
|
+
from collections.abc import Callable
|
4
|
+
from typing import Any
|
5
|
+
|
6
|
+
import networkx as nx
|
7
|
+
|
8
|
+
PRIORITY_LIST_OF_INPUTS = ["webhook", "chat"]
|
9
|
+
MAX_CYCLE_APPEARANCES = 2
|
10
|
+
|
11
|
+
|
12
|
+
def find_start_component_id(vertices, *, is_webhook: bool = False):
    """Pick the entry-point component ID from a list of vertex IDs.

    Vertex IDs are matched case-insensitively against a priority list of
    input keywords ("webhook" only when running as a webhook, otherwise
    the module-level PRIORITY_LIST_OF_INPUTS order).

    Args:
        vertices (list): A list of vertex IDs.
        is_webhook (bool, optional): Whether the flow is being run as a webhook.
            Defaults to False.

    Returns:
        str or None: The first vertex ID matching the highest-priority input
        keyword, or None when nothing matches.
    """
    keywords = ["webhook"] if is_webhook else PRIORITY_LIST_OF_INPUTS
    for keyword in keywords:
        for vertex_id in vertices:
            if keyword in vertex_id.lower():
                return vertex_id
    return None
|
31
|
+
|
32
|
+
|
33
|
+
def find_last_node(nodes, edges):
    """Return the terminal node of a flow: the first node never used as an edge source.

    Returns None when every node is the source of some edge (or nodes is empty).
    """
    sources = {edge["source"] for edge in edges}
    return next((node for node in nodes if node["id"] not in sources), None)
|
40
|
+
|
41
|
+
|
42
|
+
def add_parent_node_id(nodes, parent_node_id) -> None:
    """Tag every node dict in *nodes* with the id of the group node containing it.

    Mutates the node dicts in place by setting their "parent_node_id" key.
    """
    for entry in nodes:
        entry["parent_node_id"] = parent_node_id
|
46
|
+
|
47
|
+
|
48
|
+
def add_frozen(nodes, frozen) -> None:
    """Set the "frozen" flag on the inner node data of every node in *nodes*.

    Mutates each node dict in place at node["data"]["node"]["frozen"].
    """
    for entry in nodes:
        entry["data"]["node"]["frozen"] = frozen
|
52
|
+
|
53
|
+
|
54
|
+
def ungroup_node(group_node_data, base_flow):
    """Expand a group node in place, splicing its wrapped sub-flow into *base_flow*.

    The group node is removed from base_flow and replaced by the nodes and edges
    of the flow it wraps. Edges that pointed at (or came from) the group node are
    rebuilt so they connect to the correct inner proxy nodes, and the group's
    template values are pushed down onto those inner nodes.

    Args:
        group_node_data (dict): The ``data`` dict of the group node; must contain
            ``node.template``, ``node.flow`` and ``id``.
        base_flow (dict): The flow (with ``nodes`` and ``edges`` keys) that
            contains the group node. Mutated in place.

    Returns:
        list: The updated node list now stored in ``base_flow["nodes"]``.
    """
    template, flow, frozen = (
        group_node_data["node"]["template"],
        group_node_data["node"]["flow"],
        group_node_data["node"].get("frozen", False),
    )
    parent_node_id = group_node_data["id"]

    g_nodes = flow["data"]["nodes"]
    # Inner nodes remember which group they came from and inherit its frozen state.
    add_parent_node_id(g_nodes, parent_node_id)
    add_frozen(g_nodes, frozen)
    g_edges = flow["data"]["edges"]

    # Redirect edges to the correct proxy node
    updated_edges = get_updated_edges(base_flow, g_nodes, g_edges, group_node_data["id"])

    # Update template values
    update_template(template, g_nodes)

    # Drop the group node itself and every edge touching it, then splice in the
    # inner nodes/edges plus the re-targeted boundary edges.
    nodes = [n for n in base_flow["nodes"] if n["id"] != group_node_data["id"]] + g_nodes
    edges = (
        [e for e in base_flow["edges"] if e["target"] != group_node_data["id"] and e["source"] != group_node_data["id"]]
        + g_edges
        + updated_edges
    )

    base_flow["nodes"] = nodes
    base_flow["edges"] = edges

    return nodes
|
84
|
+
|
85
|
+
|
86
|
+
def process_flow(flow_object):
    """Return a deep copy of *flow_object* with every group node recursively expanded.

    Group nodes (nodes whose data wraps a sub-flow) are first processed
    recursively, then ungrouped into the cloned flow; the freshly exposed inner
    nodes are queued so nested groups are expanded too. The input flow_object
    itself is not modified.
    """
    cloned_flow = copy.deepcopy(flow_object)
    processed_nodes = set()  # To keep track of processed nodes

    def process_node(node) -> None:
        node_id = node.get("id")

        # If node already processed, skip
        if node_id in processed_nodes:
            return

        # A node carrying a nested flow is a group node: expand it in place.
        if node.get("data") and node["data"].get("node") and node["data"]["node"].get("flow"):
            process_flow(node["data"]["node"]["flow"]["data"])
            new_nodes = ungroup_node(node["data"], cloned_flow)
            # Add new nodes to the queue for future processing
            nodes_to_process.extend(new_nodes)

        # Mark node as processed
        processed_nodes.add(node_id)

    # Work queue of nodes still to examine; process_node appends to it as
    # group nodes are unpacked.
    nodes_to_process = deque(cloned_flow["nodes"])

    while nodes_to_process:
        node = nodes_to_process.popleft()
        process_node(node)

    return cloned_flow
|
113
|
+
|
114
|
+
|
115
|
+
def update_template(template, g_nodes) -> None:
    """Push a group node's template values down onto the proxied inner nodes.

    Each template entry that carries a ``proxy`` descriptor replaces the
    corresponding field on the inner node it proxies, while preserving that
    field's original ``show``, ``advanced`` and display name.

    Args:
        template (dict): The group node's template; values with a "proxy" key
            are applied to inner nodes.
        g_nodes (list): The list of inner nodes in the graph. Mutated in place.
    """
    for field_value in template.values():
        proxy = field_value.get("proxy")
        if not proxy:
            continue
        field_name, target_id = proxy["field"], proxy["id"]
        target_node = next((n for n in g_nodes if n["id"] == target_id), None)
        if target_node is None:
            continue
        node_template = target_node["data"]["node"]["template"]
        previous = node_template[field_name]
        # Preserve presentation flags from the field being replaced.
        show = previous["show"]
        advanced = previous["advanced"]
        if "display_name" in previous:
            display_name = previous["display_name"]
        else:
            display_name = previous["name"]

        # Replace the inner field with the group's value, then restore flags.
        node_template[field_name] = field_value
        field_value["show"] = show
        field_value["advanced"] = advanced
        field_value["display_name"] = display_name
|
141
|
+
|
142
|
+
|
143
|
+
def update_target_handle(new_edge, g_nodes):
    """Updates the target handle of a given edge if it is a proxy node.

    When the edge's target handle carries a "proxy" descriptor, the edge is
    re-pointed at the inner node the proxy names (via set_new_target_handle).
    Edges without a proxy are returned unchanged.

    Args:
        new_edge (dict): The edge to update. Mutated in place.
        g_nodes (list): The list of nodes in the graph.

    Returns:
        dict: The updated edge (same object as *new_edge*).
    """
    target_handle = new_edge["data"]["targetHandle"]
    if proxy := target_handle.get("proxy"):
        proxy_id = proxy["id"]
        # Find the inner node the proxy refers to and rewire the edge to it.
        for node in g_nodes:
            if node["id"] == proxy_id:
                set_new_target_handle(proxy_id, new_edge, target_handle, node)
                break

    return new_edge
|
162
|
+
|
163
|
+
|
164
|
+
def set_new_target_handle(proxy_id, new_edge, target_handle, node) -> None:
    """Rewire an edge's target handle to point at a proxied inner node.

    Args:
        proxy_id (str): ID of the inner node the proxy resolves to.
        new_edge (dict): The edge being rewired. Mutated in place.
        target_handle (dict): The edge's current target handle; must carry
            "type" and "proxy" keys.
        node (dict): The inner node the proxy points at.

    Raises:
        KeyError: If target_handle has no "type" key.
    """
    new_edge["target"] = proxy_id
    handle_type = target_handle.get("type")
    if handle_type is None:
        msg = "The 'type' key must be present in target_handle."
        raise KeyError(msg)

    proxied_field = target_handle["proxy"]["field"]
    updated_handle = {
        "fieldName": proxied_field,
        "type": handle_type,
        "id": proxy_id,
    }

    inner_node = node["data"]["node"]
    # Nested group node: forward the proxy one level deeper.
    if inner_node.get("flow"):
        nested_proxy = inner_node["template"][proxied_field]["proxy"]
        updated_handle["proxy"] = {
            "field": nested_proxy["field"],
            "id": nested_proxy["id"],
        }

    input_types = target_handle.get("inputTypes")
    if input_types:
        updated_handle["inputTypes"] = input_types

    new_edge["data"]["targetHandle"] = updated_handle
|
198
|
+
|
199
|
+
|
200
|
+
def update_source_handle(new_edge, g_nodes, g_edges):
    """Updates the source handle of a given edge to the last node in the flow data.

    The edge is re-pointed so its source becomes the terminal node of the
    sub-flow (the node that is never an edge source).

    Args:
        new_edge (dict): The edge to update. Mutated in place.
        g_nodes: The graph nodes.
        g_edges: The graph edges.

    Returns:
        dict: The updated edge with the new source handle.
    """
    # NOTE(review): assumes the sub-flow always has a terminal node —
    # find_last_node returning None would raise a TypeError here; confirm upstream.
    last_node = copy.deepcopy(find_last_node(g_nodes, g_edges))
    new_edge["source"] = last_node["id"]
    new_source_handle = new_edge["data"]["sourceHandle"]
    new_source_handle["id"] = last_node["id"]
    new_edge["data"]["sourceHandle"] = new_source_handle
    return new_edge
|
217
|
+
|
218
|
+
|
219
|
+
def get_updated_edges(base_flow, g_nodes, g_edges, group_node_id):
    """Get updated edges.

    Given a base flow, a list of graph nodes and a group node id, returns a list of updated edges.
    An updated edge is an edge that has its target or source handle updated based on the group node id.

    Args:
        base_flow (dict): The base flow containing a list of edges.
        g_nodes (list): A list of graph nodes.
        g_edges (list): A list of graph edges.
        group_node_id (str): The id of the group node.

    Returns:
        list: A list of updated edges.
    """
    updated_edges = []
    for edge in base_flow["edges"]:
        # Work on a copy so base_flow's edges are left untouched.
        new_edge = copy.deepcopy(edge)
        if new_edge["target"] == group_node_id:
            new_edge = update_target_handle(new_edge, g_nodes)

        if new_edge["source"] == group_node_id:
            new_edge = update_source_handle(new_edge, g_nodes, g_edges)

        # Membership is tested against the ORIGINAL edge, since the handle
        # updates above may have rewritten new_edge's endpoints.
        if group_node_id in {edge["target"], edge["source"]}:
            updated_edges.append(new_edge)
    return updated_edges
|
246
|
+
|
247
|
+
|
248
|
+
def get_successors(graph: dict[str, dict[str, list[str]]], vertex_id: str) -> list[str]:
    """Return every vertex reachable from *vertex_id* via successor edges.

    Depth-first traversal; the starting vertex itself is excluded from the
    result. Cycles are handled by tracking visited vertices.
    """
    reachable: list[str] = []
    seen: set[str] = set()
    pending = [vertex_id]
    while pending:
        node_id = pending.pop()
        if node_id in seen:
            continue
        seen.add(node_id)
        if node_id != vertex_id:
            reachable.append(node_id)
        pending.extend(graph[node_id]["successors"])
    return reachable
|
261
|
+
|
262
|
+
|
263
|
+
def get_root_of_group_node(
    graph: dict[str, dict[str, list[str]]], vertex_id: str, parent_node_map: dict[str, str | None]
) -> str:
    """Returns the root of a group node.

    *vertex_id* is treated as a group (parent) node; the root is the child
    vertex from which no other child of the same group is reachable — i.e.
    the group's terminal vertex.

    Raises:
        ValueError: If vertex_id is not a parent of any vertex, or no child
            qualifies as a root.
    """
    if vertex_id in parent_node_map.values():
        # Get all vertices with vertex_id as their parent node
        child_vertices = [v_id for v_id, parent_id in parent_node_map.items() if parent_id == vertex_id]

        # Now go through successors of the child vertices
        # and get the one that none of its successors is in child_vertices
        for child_id in child_vertices:
            successors = get_successors(graph, child_id)
            if not any(successor in child_vertices for successor in successors):
                return child_id

    msg = f"Vertex {vertex_id} is not a top level vertex or no root vertex found"
    raise ValueError(msg)
|
280
|
+
|
281
|
+
|
282
|
+
def sort_up_to_vertex(
    graph: dict[str, dict[str, list[str]]],
    vertex_id: str,
    *,
    parent_node_map: dict[str, str | None] | None = None,
    is_start: bool = False,
) -> list[str]:
    """Cuts the graph up to a given vertex and sorts the resulting subgraph.

    With ``is_start=False`` (stop semantics) the result is the set of vertices
    the stop vertex depends on, with everything downstream of it excluded.
    With ``is_start=True`` the traversal also follows successors, collecting
    the subgraph reachable around the start vertex.

    Args:
        graph: Adjacency info keyed by vertex id, each entry holding
            "predecessors" and "successors" lists.
        vertex_id: The stop (or start) vertex. If it is a group node id not
            present in *graph*, its root vertex is resolved via parent_node_map.
        parent_node_map: Map of vertex id -> parent group node id; required
            only when vertex_id is a group node.
        is_start: Whether vertex_id is a start vertex rather than a stop vertex.

    Returns:
        The ids of the vertices in the cut subgraph (unordered).

    Raises:
        ValueError: If the vertex cannot be resolved in the graph.
    """
    try:
        stop_or_start_vertex = graph[vertex_id]
    except KeyError as e:
        # vertex_id may be a group node; resolve it to the group's root vertex.
        if parent_node_map is None:
            msg = "Parent node map is required to find the root of a group node"
            raise ValueError(msg) from e
        vertex_id = get_root_of_group_node(graph=graph, vertex_id=vertex_id, parent_node_map=parent_node_map)
        if vertex_id not in graph:
            msg = f"Vertex {vertex_id} not found into graph"
            raise ValueError(msg) from e
        stop_or_start_vertex = graph[vertex_id]

    visited, excluded = set(), set()
    stack = [vertex_id]
    stop_predecessors = set(stop_or_start_vertex["predecessors"])

    while stack:
        current_id = stack.pop()
        if current_id in visited or current_id in excluded:
            continue

        visited.add(current_id)
        current_vertex = graph[current_id]

        # Always walk upstream: dependencies are part of the cut.
        stack.extend(current_vertex["predecessors"])

        if current_id == vertex_id or (current_id not in stop_predecessors and is_start):
            # In start mode, downstream vertices are also collected;
            # in stop mode, everything downstream of the stop vertex is excluded.
            for successor_id in current_vertex["successors"]:
                if is_start:
                    stack.append(successor_id)
                else:
                    excluded.add(successor_id)
                for succ_id in get_successors(graph, successor_id):
                    if is_start:
                        stack.append(succ_id)
                    else:
                        excluded.add(succ_id)

    return list(visited)
|
329
|
+
|
330
|
+
|
331
|
+
def has_cycle(vertex_ids: list[str], edges: list[tuple[str, str]]) -> bool:
    """Determines whether a directed graph represented by a list of vertices and edges contains a cycle.

    Uses an iterative three-color depth-first search instead of recursion, so
    long dependency chains cannot overflow Python's recursion limit (the
    previous recursive implementation raised RecursionError on graphs deeper
    than ~1000 vertices).

    Args:
        vertex_ids (list[str]): A list of vertex IDs.
        edges (list[tuple[str, str]]): A list of tuples representing directed edges between vertices.

    Returns:
        bool: True if the graph contains a cycle, False otherwise.
    """
    # Build the graph as an adjacency list
    graph = defaultdict(list)
    for u, v in edges:
        graph[u].append(v)

    # 0 = unvisited, 1 = on the current DFS path, 2 = fully explored
    white, gray, black = 0, 1, 2
    color: dict[str, int] = defaultdict(int)

    for start in vertex_ids:
        if color[start] != white:
            continue
        color[start] = gray
        # Each stack frame holds (vertex, iterator over its neighbors) so the
        # traversal can resume exactly where it left off.
        stack = [(start, iter(graph[start]))]
        while stack:
            vertex, neighbors = stack[-1]
            advanced = False
            for neighbor in neighbors:
                if color[neighbor] == gray:
                    # Back edge to a vertex on the current path: cycle found.
                    return True
                if color[neighbor] == white:
                    color[neighbor] = gray
                    stack.append((neighbor, iter(graph[neighbor])))
                    advanced = True
                    break
            if not advanced:
                # All neighbors explored; retire this vertex from the path.
                color[vertex] = black
                stack.pop()
    return False
|
365
|
+
|
366
|
+
|
367
|
+
def find_cycle_edge(entry_point: str, edges: list[tuple[str, str]]) -> tuple[str, str]:
    """Find the edge that causes a cycle in a directed graph starting from a given entry point.

    Performs a depth-first search from *entry_point* and reports the first back
    edge encountered (the edge that closes a cycle).

    Args:
        entry_point (str): The vertex ID from which to start the search.
        edges (list[tuple[str, str]]): A list of tuples representing directed edges between vertices.

    Returns:
        tuple[str, str]: The edge that closes the first cycle found, or None
        if no cycle is reachable from entry_point.
    """
    adjacency = defaultdict(list)
    for source, target in edges:
        adjacency[source].append(target)

    on_path: set[str] = set()
    seen: set[str] = set()

    def walk(current):
        seen.add(current)
        on_path.add(current)
        for nxt in adjacency[current]:
            # A neighbor already on the current path closes a cycle.
            if nxt in on_path:
                return (current, nxt)
            if nxt not in seen:
                found = walk(nxt)
                if found:
                    return found
        on_path.discard(current)
        return None

    return walk(entry_point)
|
402
|
+
|
403
|
+
|
404
|
+
def find_all_cycle_edges(entry_point: str, edges: list[tuple[str, str]]) -> list[tuple[str, str]]:
    """Find all edges that cause cycles in a directed graph starting from a given entry point.

    Depth-first search from *entry_point*; every back edge (an edge whose
    target is already on the current DFS path) is collected.

    Args:
        entry_point (str): The vertex ID from which to start the search.
        edges (list[tuple[str, str]]): A list of tuples representing directed edges between vertices.

    Returns:
        list[tuple[str, str]]: All cycle-closing edges reachable from entry_point.
    """
    adjacency = defaultdict(list)
    for source, target in edges:
        adjacency[source].append(target)

    seen: set[str] = set()
    on_path: set[str] = set()
    closing_edges: list[tuple[str, str]] = []

    def walk(current) -> None:
        seen.add(current)
        on_path.add(current)
        for nxt in adjacency[current]:
            if nxt not in seen:
                walk(nxt)
            elif nxt in on_path:
                # Back edge onto the current path: records one cycle.
                closing_edges.append((current, nxt))
        on_path.discard(current)

    walk(entry_point)
    return closing_edges
|
439
|
+
|
440
|
+
|
441
|
+
def should_continue(yielded_counts: dict[str, int], max_iterations: int | None) -> bool:
|
442
|
+
if max_iterations is None:
|
443
|
+
return True
|
444
|
+
return max(yielded_counts.values(), default=0) <= max_iterations
|
445
|
+
|
446
|
+
|
447
|
+
def find_cycle_vertices(edges):
    """Return the sorted IDs of every vertex that participates in a cycle.

    Builds a directed graph from *edges* and uses NetworkX's strongly connected
    components: any component with more than one vertex is cyclic, and a
    single-vertex component is cyclic only if it has a self-loop.
    """
    graph = nx.DiGraph(edges)

    # Initialize a set to collect vertices part of any cycle
    cycle_vertices = set()

    # Utilize the strong component feature in NetworkX to find cycles
    for component in nx.strongly_connected_components(graph):
        # Single-vertex components count only when a self-loop exists.
        if len(component) > 1 or graph.has_edge(tuple(component)[0], tuple(component)[0]):  # noqa: RUF015
            cycle_vertices.update(component)

    return sorted(cycle_vertices)
|
459
|
+
|
460
|
+
|
461
|
+
def layered_topological_sort(
    vertices_ids: set[str],
    in_degree_map: dict[str, int],
    successor_map: dict[str, list[str]],
    predecessor_map: dict[str, list[str]],
    start_id: str | None = None,
    cycle_vertices: set[str] | None = None,
    is_input_vertex: Callable[[str], bool] | None = None,  # noqa: ARG001
    *,
    is_cyclic: bool = False,
) -> list[list[str]]:
    """Performs a layered topological sort of the vertices in the graph.

    Kahn-style BFS that groups vertices into layers; in cyclic graphs,
    cycle members may appear in up to MAX_CYCLE_APPEARANCES layers so the
    cycle can be traversed.

    Args:
        vertices_ids: Set of vertex IDs to sort
        in_degree_map: Map of vertex IDs to their in-degree
        successor_map: Map of vertex IDs to their successors
        predecessor_map: Map of vertex IDs to their predecessors
        is_cyclic: Whether the graph is cyclic
        start_id: ID of the start vertex (if any)
        cycle_vertices: Set of vertices that form a cycle
        is_input_vertex: Function to check if a vertex is an input vertex
            (currently unused; kept for interface compatibility)

    Returns:
        List of layers, where each layer is a list of vertex IDs
    """
    # Queue for vertices with no incoming edges
    cycle_vertices = cycle_vertices or set()
    # Copy so the caller's in-degree map is not mutated by edge "removal".
    in_degree_map = in_degree_map.copy()

    if is_cyclic and all(in_degree_map.values()):
        # This means we have a cycle because all vertex have in_degree_map > 0
        # because of this we set the queue to start on the start_id if it exists
        if start_id is not None:
            queue = deque([start_id])
            # Reset in_degree for start_id to allow cycle traversal
            in_degree_map[start_id] = 0
        else:
            # Find the chat input component
            chat_input = find_start_component_id(vertices_ids)
            if chat_input is None:
                # If no input component is found, start with any vertex
                queue = deque([next(iter(vertices_ids))])
                in_degree_map[next(iter(vertices_ids))] = 0
            else:
                queue = deque([chat_input])
                # Reset in_degree for chat_input to allow cycle traversal
                in_degree_map[chat_input] = 0
    else:
        # Start with vertices that have no incoming edges or are input vertices
        queue = deque(
            vertex_id
            for vertex_id in vertices_ids
            if in_degree_map[vertex_id] == 0
            # We checked if it is input but that caused the TextInput to be at the start
            # or (is_input_vertex and is_input_vertex(vertex_id))
        )

    layers: list[list[str]] = []
    visited = set()
    # Tracks how many layers each vertex has appeared in (for cycle re-visits).
    cycle_counts = dict.fromkeys(vertices_ids, 0)
    current_layer = 0

    # Process the first layer separately to avoid duplicates
    if queue:
        layers.append([])  # Start the first layer
        first_layer_vertices = set()
        layer_size = len(queue)
        for _ in range(layer_size):
            vertex_id = queue.popleft()
            if vertex_id not in first_layer_vertices:
                first_layer_vertices.add(vertex_id)
                visited.add(vertex_id)
                cycle_counts[vertex_id] += 1
                layers[current_layer].append(vertex_id)

            for neighbor in successor_map[vertex_id]:
                # only vertices in `vertices_ids` should be considered
                # because vertices by have been filtered out
                # in a previous step. All dependencies of theirs
                # will be built automatically if required
                if neighbor not in vertices_ids:
                    continue

                in_degree_map[neighbor] -= 1  # 'remove' edge
                if in_degree_map[neighbor] == 0:
                    queue.append(neighbor)

                # if > 0 it might mean not all predecessors have added to the queue
                # so we should process the neighbors predecessors
                elif in_degree_map[neighbor] > 0:
                    for predecessor in predecessor_map[neighbor]:
                        if (
                            predecessor not in queue
                            and predecessor not in first_layer_vertices
                            and (in_degree_map[predecessor] == 0 or predecessor in cycle_vertices)
                        ):
                            queue.append(predecessor)

        current_layer += 1  # Next layer

    # Process remaining layers normally, allowing cycle vertices to appear multiple times
    while queue:
        layers.append([])  # Start a new layer
        layer_size = len(queue)
        for _ in range(layer_size):
            vertex_id = queue.popleft()
            if vertex_id not in visited or (is_cyclic and cycle_counts[vertex_id] < MAX_CYCLE_APPEARANCES):
                if vertex_id not in visited:
                    visited.add(vertex_id)
                cycle_counts[vertex_id] += 1
                layers[current_layer].append(vertex_id)

                for neighbor in successor_map[vertex_id]:
                    # only vertices in `vertices_ids` should be considered
                    # because vertices by have been filtered out
                    # in a previous step. All dependencies of theirs
                    # will be built automatically if required
                    if neighbor not in vertices_ids:
                        continue

                    in_degree_map[neighbor] -= 1  # 'remove' edge
                    if in_degree_map[neighbor] == 0 and neighbor not in visited:
                        queue.append(neighbor)
                    # # If this is a cycle vertex, reset its in_degree to allow it to appear again
                    # if neighbor in cycle_vertices and neighbor in visited:
                    #     in_degree_map[neighbor] = len(predecessor_map[neighbor])

                    # if > 0 it might mean not all predecessors have added to the queue
                    # so we should process the neighbors predecessors
                    elif in_degree_map[neighbor] > 0:
                        for predecessor in predecessor_map[neighbor]:
                            if predecessor not in queue and (
                                predecessor not in visited
                                or (is_cyclic and cycle_counts[predecessor] < MAX_CYCLE_APPEARANCES)
                            ):
                                queue.append(predecessor)

        current_layer += 1  # Next layer

    # Remove empty layers
    return [layer for layer in layers if layer]
|
603
|
+
|
604
|
+
|
605
|
+
def refine_layers(
    initial_layers: list[list[str]],
    successor_map: dict[str, list[str]],
) -> list[list[str]]:
    """Refines the layers of vertices to ensure proper dependency ordering.

    Each vertex is moved as late as possible: to the layer just before its
    earliest successor, but never earlier than its current layer.

    Args:
        initial_layers: Initial layers of vertices
        successor_map: Map of vertex IDs to their successors

    Returns:
        Refined layers with proper dependency ordering
    """
    # Map each vertex to the index of the layer it currently occupies.
    vertex_to_layer: dict[str, int] = {
        vertex: layer_index for layer_index, layer in enumerate(initial_layers) for vertex in layer
    }

    refined_layers: list[list[str]] = [[] for _ in initial_layers]
    target_layer_map: dict[str, int] = defaultdict(int)

    # Desired layer = one before the earliest successor (clamped at 0).
    # Vertices without known successors default to layer 0.
    for vertex_id, successors in successor_map.items():
        successor_layers = [vertex_to_layer[s] for s in successors if s in vertex_to_layer]
        target_layer_map[vertex_id] = max(min(successor_layers, default=0) - 1, 0)

    for layer_index, layer in enumerate(initial_layers):
        for vertex_id in layer:
            target = target_layer_map[vertex_id]
            # Only ever push a vertex later, never earlier.
            if target > layer_index:
                refined_layers[target].append(vertex_id)
                vertex_to_layer[vertex_id] = target
            else:
                refined_layers[layer_index].append(vertex_id)

    # Remove empty layers if any
    return [layer for layer in refined_layers if layer]
|
650
|
+
|
651
|
+
|
652
|
+
def _max_dependency_index(
|
653
|
+
vertex_id: str,
|
654
|
+
index_map: dict[str, int],
|
655
|
+
get_vertex_successors: Callable[[str], list[str]],
|
656
|
+
) -> int:
|
657
|
+
"""Finds the highest index a given vertex's dependencies occupy in the same layer.
|
658
|
+
|
659
|
+
Args:
|
660
|
+
vertex_id: ID of the vertex to check
|
661
|
+
index_map: Map of vertex IDs to their indices in the layer
|
662
|
+
get_vertex_successors: Function to get the successor IDs of a vertex
|
663
|
+
|
664
|
+
Returns:
|
665
|
+
The highest index of the vertex's dependencies
|
666
|
+
"""
|
667
|
+
max_index = -1
|
668
|
+
for successor_id in get_vertex_successors(vertex_id):
|
669
|
+
successor_index = index_map.get(successor_id, -1)
|
670
|
+
max_index = max(successor_index, max_index)
|
671
|
+
return max_index
|
672
|
+
|
673
|
+
|
674
|
+
def _sort_single_layer_by_dependency(
|
675
|
+
layer: list[str],
|
676
|
+
get_vertex_successors: Callable[[str], list[str]],
|
677
|
+
) -> list[str]:
|
678
|
+
"""Sorts a single layer by dependency using a stable sorting method.
|
679
|
+
|
680
|
+
Args:
|
681
|
+
layer: List of vertex IDs in the layer
|
682
|
+
get_vertex_successors: Function to get the successor IDs of a vertex
|
683
|
+
|
684
|
+
Returns:
|
685
|
+
Sorted list of vertex IDs
|
686
|
+
"""
|
687
|
+
# Build a map of each vertex to its index in the layer for quick lookup.
|
688
|
+
index_map = {vertex: index for index, vertex in enumerate(layer)}
|
689
|
+
dependency_cache: dict[str, int] = {}
|
690
|
+
|
691
|
+
def max_dependency_index(vertex: str) -> int:
|
692
|
+
if vertex in dependency_cache:
|
693
|
+
return dependency_cache[vertex]
|
694
|
+
max_index = index_map[vertex]
|
695
|
+
for successor in get_vertex_successors(vertex):
|
696
|
+
if successor in index_map:
|
697
|
+
max_index = max(max_index, max_dependency_index(successor))
|
698
|
+
|
699
|
+
dependency_cache[vertex] = max_index
|
700
|
+
return max_index
|
701
|
+
|
702
|
+
return sorted(layer, key=max_dependency_index, reverse=True)
|
703
|
+
|
704
|
+
|
705
|
+
def sort_layer_by_dependency(
    vertices_layers: list[list[str]],
    get_vertex_successors: Callable[[str], list[str]],
) -> list[list[str]]:
    """Sorts the vertices in each layer by dependency, ensuring no vertex depends on a subsequent vertex.

    Args:
        vertices_layers: List of layers, where each layer is a list of vertex IDs
        get_vertex_successors: Function to get the successor IDs of a vertex

    Returns:
        Sorted layers
    """
    sorted_layers: list[list[str]] = []
    for layer in vertices_layers:
        sorted_layers.append(_sort_single_layer_by_dependency(layer, get_vertex_successors))
    return sorted_layers
|
719
|
+
|
720
|
+
|
721
|
+
def sort_chat_inputs_first(
    vertices_layers: list[list[str]],
    get_vertex_predecessors: Callable[[str], list[str]],
) -> list[list[str]]:
    """Sorts the vertices so that chat inputs come first in the layers.

    Only one chat input is allowed in the entire graph. If the chat input has
    upstream dependencies it cannot be promoted, and the layers are returned
    unchanged.

    Args:
        vertices_layers: List of layers, where each layer is a list of vertex IDs
        get_vertex_predecessors: Function to get the predecessor IDs of a vertex

    Returns:
        Sorted layers with single chat input first

    Raises:
        ValueError: If there are multiple chat inputs in the graph
    """
    chat_input = None
    chat_input_layer_idx = None

    # Find chat input and validate only one exists
    for layer_idx, layer in enumerate(vertices_layers):
        for vertex_id in layer:
            if "ChatInput" not in vertex_id:
                continue
            # A chat input with predecessors cannot be moved to the front.
            if get_vertex_predecessors(vertex_id):
                return vertices_layers
            if chat_input is not None:
                msg = "Only one chat input is allowed in the graph"
                raise ValueError(msg)
            chat_input = vertex_id
            chat_input_layer_idx = layer_idx

    if not chat_input:
        return vertices_layers

    if chat_input_layer_idx == 0:
        # Already alone at the front: nothing to do.
        if len(vertices_layers[0]) == 1:
            return vertices_layers
        # Otherwise pull it out into its own leading layer.
        vertices_layers[0].remove(chat_input)
        return [[chat_input], *vertices_layers]

    # Rebuild the layers without the chat input, dropping layers it emptied,
    # then prepend it as its own layer.
    remaining = []
    for layer in vertices_layers:
        pruned = [v for v in layer if v != chat_input]
        if pruned:
            remaining.append(pruned)

    return [[chat_input], *remaining]
|
774
|
+
|
775
|
+
|
776
|
+
def get_sorted_vertices(
    vertices_ids: list[str],
    cycle_vertices: set[str],
    stop_component_id: str | None = None,
    start_component_id: str | None = None,
    graph_dict: dict[str, Any] | None = None,
    in_degree_map: dict[str, int] | None = None,
    successor_map: dict[str, list[str]] | None = None,
    predecessor_map: dict[str, list[str]] | None = None,
    is_input_vertex: Callable[[str], bool] | None = None,
    get_vertex_predecessors: Callable[[str], list[str]] | None = None,
    get_vertex_successors: Callable[[str], list[str]] | None = None,
    *,
    is_cyclic: bool = False,
) -> tuple[list[str], list[list[str]]]:
    """Get sorted vertices in a graph.

    Filters the vertex set by the optional start/stop components, runs a
    layered topological sort, then applies post-sort ordering passes
    (chat inputs first, dependency order within layers).

    Args:
        vertices_ids: List of vertex IDs to sort
        cycle_vertices: Set of vertices that form a cycle
        stop_component_id: ID of the stop component (if any)
        start_component_id: ID of the start component (if any)
        graph_dict: Dictionary containing graph information
        in_degree_map: Map of vertex IDs to their in-degree
        successor_map: Map of vertex IDs to their successors
        predecessor_map: Map of vertex IDs to their predecessors
        is_input_vertex: Function to check if a vertex is an input vertex
        get_vertex_predecessors: Function to get predecessors of a vertex
        get_vertex_successors: Function to get successors of a vertex
        is_cyclic: Whether the graph is cyclic

    Returns:
        Tuple of (first layer vertices, remaining layer vertices)
    """
    # A stop component inside a cycle cannot terminate the traversal (the
    # cycle would keep feeding it), so treat it as a start component instead.
    if stop_component_id in cycle_vertices:
        start_component_id = stop_component_id
        stop_component_id = None

    # Build in_degree_map if not provided; without a predecessor accessor we
    # can only assume in-degree 0 for every vertex.
    if in_degree_map is None:
        in_degree_map = {}
        for vertex_id in vertices_ids:
            if get_vertex_predecessors is not None:
                in_degree_map[vertex_id] = len(get_vertex_predecessors(vertex_id))
            else:
                in_degree_map[vertex_id] = 0

    # Build successor_map if not provided (empty lists when no accessor).
    if successor_map is None:
        successor_map = {}
        for vertex_id in vertices_ids:
            if get_vertex_successors is not None:
                successor_map[vertex_id] = get_vertex_successors(vertex_id)
            else:
                successor_map[vertex_id] = []

    # Build predecessor_map if not provided (empty lists when no accessor).
    if predecessor_map is None:
        predecessor_map = {}
        for vertex_id in vertices_ids:
            if get_vertex_predecessors is not None:
                predecessor_map[vertex_id] = get_vertex_predecessors(vertex_id)
            else:
                predecessor_map[vertex_id] = []

    # If we have a stop component, we need to filter out all vertices
    # that are not predecessors of the stop component.
    if stop_component_id is not None:
        filtered_vertices = filter_vertices_up_to_vertex(
            vertices_ids,
            stop_component_id,
            get_vertex_predecessors=get_vertex_predecessors,
            get_vertex_successors=get_vertex_successors,
            graph_dict=graph_dict,
        )
        vertices_ids = list(filtered_vertices)

    # If we have a start component, we need to filter out unconnected vertices
    # but keep vertices that are connected to the graph even if not reachable
    # from start (i.e. ancestors of any reachable vertex).
    if start_component_id is not None:
        # First get all vertices reachable from start (forward traversal).
        reachable_vertices = filter_vertices_from_vertex(
            vertices_ids,
            start_component_id,
            get_vertex_predecessors=get_vertex_predecessors,
            get_vertex_successors=get_vertex_successors,
            graph_dict=graph_dict,
        )
        # Then add every vertex that can reach any reachable vertex
        # (backward traversal from each of them).
        connected_vertices = set()
        for vertex in reachable_vertices:
            connected_vertices.update(
                filter_vertices_up_to_vertex(
                    vertices_ids,
                    vertex,
                    get_vertex_predecessors=get_vertex_predecessors,
                    get_vertex_successors=get_vertex_successors,
                    graph_dict=graph_dict,
                )
            )
        vertices_ids = list(connected_vertices)

    # Get the layers via a layered topological sort (defined elsewhere in
    # this module).
    layers = layered_topological_sort(
        vertices_ids=set(vertices_ids),
        in_degree_map=in_degree_map,
        successor_map=successor_map,
        predecessor_map=predecessor_map,
        start_id=start_component_id,
        is_input_vertex=is_input_vertex,
        cycle_vertices=cycle_vertices,
        is_cyclic=is_cyclic,
    )

    # Split into first layer and remaining layers.
    if not layers:
        return [], []

    first_layer = layers[0]
    remaining_layers = layers[1:]

    # Ensure the stop component itself runs last: append it to the final
    # layer if the sort did not already place it there.
    if stop_component_id is not None and remaining_layers and stop_component_id not in remaining_layers[-1]:
        remaining_layers[-1].append(stop_component_id)

    # Sort chat inputs first (only when no explicit start component, which
    # would otherwise dictate the first layer) and sort each layer by
    # dependencies.
    all_layers = [first_layer, *remaining_layers]
    if get_vertex_predecessors is not None and start_component_id is None:
        all_layers = sort_chat_inputs_first(all_layers, get_vertex_predecessors)
    if get_vertex_successors is not None:
        all_layers = sort_layer_by_dependency(all_layers, get_vertex_successors)

    if not all_layers:
        return [], []

    return all_layers[0], all_layers[1:]
|
914
|
+
|
915
|
+
|
916
|
+
def filter_vertices_up_to_vertex(
|
917
|
+
vertices_ids: list[str],
|
918
|
+
vertex_id: str,
|
919
|
+
get_vertex_predecessors: Callable[[str], list[str]] | None = None,
|
920
|
+
get_vertex_successors: Callable[[str], list[str]] | None = None,
|
921
|
+
graph_dict: dict[str, Any] | None = None,
|
922
|
+
) -> set[str]:
|
923
|
+
"""Filter vertices up to a given vertex.
|
924
|
+
|
925
|
+
Args:
|
926
|
+
vertices_ids: List of vertex IDs to filter
|
927
|
+
vertex_id: ID of the vertex to filter up to
|
928
|
+
get_vertex_predecessors: Function to get predecessors of a vertex
|
929
|
+
get_vertex_successors: Function to get successors of a vertex
|
930
|
+
graph_dict: Dictionary containing graph information
|
931
|
+
parent_node_map: Map of vertex IDs to their parent node IDs
|
932
|
+
|
933
|
+
Returns:
|
934
|
+
Set of vertex IDs that are predecessors of the given vertex
|
935
|
+
"""
|
936
|
+
vertices_set = set(vertices_ids)
|
937
|
+
if vertex_id not in vertices_set:
|
938
|
+
return set()
|
939
|
+
|
940
|
+
# Build predecessor map if not provided
|
941
|
+
if get_vertex_predecessors is None:
|
942
|
+
if graph_dict is None:
|
943
|
+
msg = "Either get_vertex_predecessors or graph_dict must be provided"
|
944
|
+
raise ValueError(msg)
|
945
|
+
|
946
|
+
def get_vertex_predecessors(v):
|
947
|
+
return graph_dict[v]["predecessors"]
|
948
|
+
|
949
|
+
# Build successor map if not provided
|
950
|
+
if get_vertex_successors is None:
|
951
|
+
if graph_dict is None:
|
952
|
+
return set()
|
953
|
+
|
954
|
+
def get_vertex_successors(v):
|
955
|
+
return graph_dict[v]["successors"]
|
956
|
+
|
957
|
+
# Start with the target vertex
|
958
|
+
filtered_vertices = {vertex_id}
|
959
|
+
queue = deque([vertex_id])
|
960
|
+
|
961
|
+
# Process vertices in breadth-first order
|
962
|
+
while queue:
|
963
|
+
current_vertex = queue.popleft()
|
964
|
+
for predecessor in get_vertex_predecessors(current_vertex):
|
965
|
+
if predecessor in vertices_set and predecessor not in filtered_vertices:
|
966
|
+
filtered_vertices.add(predecessor)
|
967
|
+
queue.append(predecessor)
|
968
|
+
|
969
|
+
return filtered_vertices
|
970
|
+
|
971
|
+
|
972
|
+
def filter_vertices_from_vertex(
|
973
|
+
vertices_ids: list[str],
|
974
|
+
vertex_id: str,
|
975
|
+
get_vertex_predecessors: Callable[[str], list[str]] | None = None,
|
976
|
+
get_vertex_successors: Callable[[str], list[str]] | None = None,
|
977
|
+
graph_dict: dict[str, Any] | None = None,
|
978
|
+
) -> set[str]:
|
979
|
+
"""Filter vertices starting from a given vertex.
|
980
|
+
|
981
|
+
Args:
|
982
|
+
vertices_ids: List of vertex IDs to filter
|
983
|
+
vertex_id: ID of the vertex to start filtering from
|
984
|
+
get_vertex_predecessors: Function to get predecessors of a vertex
|
985
|
+
get_vertex_successors: Function to get successors of a vertex
|
986
|
+
graph_dict: Dictionary containing graph information
|
987
|
+
|
988
|
+
Returns:
|
989
|
+
Set of vertex IDs that are successors of the given vertex
|
990
|
+
"""
|
991
|
+
vertices_set = set(vertices_ids)
|
992
|
+
if vertex_id not in vertices_set:
|
993
|
+
return set()
|
994
|
+
|
995
|
+
# Build predecessor map if not provided
|
996
|
+
if get_vertex_predecessors is None:
|
997
|
+
if graph_dict is None:
|
998
|
+
msg = "Either get_vertex_predecessors or graph_dict must be provided"
|
999
|
+
raise ValueError(msg)
|
1000
|
+
|
1001
|
+
def get_vertex_predecessors(v):
|
1002
|
+
return graph_dict[v]["predecessors"]
|
1003
|
+
|
1004
|
+
# Build successor map if not provided
|
1005
|
+
if get_vertex_successors is None:
|
1006
|
+
if graph_dict is None:
|
1007
|
+
return set()
|
1008
|
+
|
1009
|
+
def get_vertex_successors(v):
|
1010
|
+
return graph_dict[v]["successors"]
|
1011
|
+
|
1012
|
+
# Start with the target vertex
|
1013
|
+
filtered_vertices = {vertex_id}
|
1014
|
+
queue = deque([vertex_id])
|
1015
|
+
|
1016
|
+
# Process vertices in breadth-first order
|
1017
|
+
while queue:
|
1018
|
+
current_vertex = queue.popleft()
|
1019
|
+
for successor in get_vertex_successors(current_vertex):
|
1020
|
+
if successor in vertices_set and successor not in filtered_vertices:
|
1021
|
+
filtered_vertices.add(successor)
|
1022
|
+
queue.append(successor)
|
1023
|
+
|
1024
|
+
return filtered_vertices
|