lfx_nightly-0.1.11.dev0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lfx/__init__.py +0 -0
- lfx/__main__.py +25 -0
- lfx/base/__init__.py +0 -0
- lfx/base/agents/__init__.py +0 -0
- lfx/base/agents/agent.py +268 -0
- lfx/base/agents/callback.py +130 -0
- lfx/base/agents/context.py +109 -0
- lfx/base/agents/crewai/__init__.py +0 -0
- lfx/base/agents/crewai/crew.py +231 -0
- lfx/base/agents/crewai/tasks.py +12 -0
- lfx/base/agents/default_prompts.py +23 -0
- lfx/base/agents/errors.py +15 -0
- lfx/base/agents/events.py +346 -0
- lfx/base/agents/utils.py +205 -0
- lfx/base/astra_assistants/__init__.py +0 -0
- lfx/base/astra_assistants/util.py +171 -0
- lfx/base/chains/__init__.py +0 -0
- lfx/base/chains/model.py +19 -0
- lfx/base/composio/__init__.py +0 -0
- lfx/base/composio/composio_base.py +1291 -0
- lfx/base/compressors/__init__.py +0 -0
- lfx/base/compressors/model.py +60 -0
- lfx/base/constants.py +46 -0
- lfx/base/curl/__init__.py +0 -0
- lfx/base/curl/parse.py +188 -0
- lfx/base/data/__init__.py +5 -0
- lfx/base/data/base_file.py +685 -0
- lfx/base/data/docling_utils.py +245 -0
- lfx/base/data/utils.py +198 -0
- lfx/base/document_transformers/__init__.py +0 -0
- lfx/base/document_transformers/model.py +43 -0
- lfx/base/embeddings/__init__.py +0 -0
- lfx/base/embeddings/aiml_embeddings.py +62 -0
- lfx/base/embeddings/model.py +26 -0
- lfx/base/flow_processing/__init__.py +0 -0
- lfx/base/flow_processing/utils.py +86 -0
- lfx/base/huggingface/__init__.py +0 -0
- lfx/base/huggingface/model_bridge.py +133 -0
- lfx/base/io/__init__.py +0 -0
- lfx/base/io/chat.py +20 -0
- lfx/base/io/text.py +22 -0
- lfx/base/langchain_utilities/__init__.py +0 -0
- lfx/base/langchain_utilities/model.py +35 -0
- lfx/base/langchain_utilities/spider_constants.py +1 -0
- lfx/base/langwatch/__init__.py +0 -0
- lfx/base/langwatch/utils.py +18 -0
- lfx/base/mcp/__init__.py +0 -0
- lfx/base/mcp/constants.py +2 -0
- lfx/base/mcp/util.py +1398 -0
- lfx/base/memory/__init__.py +0 -0
- lfx/base/memory/memory.py +49 -0
- lfx/base/memory/model.py +38 -0
- lfx/base/models/__init__.py +3 -0
- lfx/base/models/aiml_constants.py +51 -0
- lfx/base/models/anthropic_constants.py +47 -0
- lfx/base/models/aws_constants.py +151 -0
- lfx/base/models/chat_result.py +76 -0
- lfx/base/models/google_generative_ai_constants.py +70 -0
- lfx/base/models/groq_constants.py +134 -0
- lfx/base/models/model.py +375 -0
- lfx/base/models/model_input_constants.py +307 -0
- lfx/base/models/model_metadata.py +41 -0
- lfx/base/models/model_utils.py +8 -0
- lfx/base/models/novita_constants.py +35 -0
- lfx/base/models/ollama_constants.py +49 -0
- lfx/base/models/openai_constants.py +122 -0
- lfx/base/models/sambanova_constants.py +18 -0
- lfx/base/processing/__init__.py +0 -0
- lfx/base/prompts/__init__.py +0 -0
- lfx/base/prompts/api_utils.py +224 -0
- lfx/base/prompts/utils.py +61 -0
- lfx/base/textsplitters/__init__.py +0 -0
- lfx/base/textsplitters/model.py +28 -0
- lfx/base/tools/__init__.py +0 -0
- lfx/base/tools/base.py +26 -0
- lfx/base/tools/component_tool.py +325 -0
- lfx/base/tools/constants.py +49 -0
- lfx/base/tools/flow_tool.py +132 -0
- lfx/base/tools/run_flow.py +224 -0
- lfx/base/vectorstores/__init__.py +0 -0
- lfx/base/vectorstores/model.py +193 -0
- lfx/base/vectorstores/utils.py +22 -0
- lfx/base/vectorstores/vector_store_connection_decorator.py +52 -0
- lfx/cli/__init__.py +5 -0
- lfx/cli/commands.py +319 -0
- lfx/cli/common.py +650 -0
- lfx/cli/run.py +441 -0
- lfx/cli/script_loader.py +247 -0
- lfx/cli/serve_app.py +546 -0
- lfx/cli/validation.py +69 -0
- lfx/components/FAISS/__init__.py +34 -0
- lfx/components/FAISS/faiss.py +111 -0
- lfx/components/Notion/__init__.py +19 -0
- lfx/components/Notion/add_content_to_page.py +269 -0
- lfx/components/Notion/create_page.py +94 -0
- lfx/components/Notion/list_database_properties.py +68 -0
- lfx/components/Notion/list_pages.py +122 -0
- lfx/components/Notion/list_users.py +77 -0
- lfx/components/Notion/page_content_viewer.py +93 -0
- lfx/components/Notion/search.py +111 -0
- lfx/components/Notion/update_page_property.py +114 -0
- lfx/components/__init__.py +411 -0
- lfx/components/_importing.py +42 -0
- lfx/components/agentql/__init__.py +3 -0
- lfx/components/agentql/agentql_api.py +151 -0
- lfx/components/agents/__init__.py +34 -0
- lfx/components/agents/agent.py +558 -0
- lfx/components/agents/mcp_component.py +501 -0
- lfx/components/aiml/__init__.py +37 -0
- lfx/components/aiml/aiml.py +112 -0
- lfx/components/aiml/aiml_embeddings.py +37 -0
- lfx/components/amazon/__init__.py +36 -0
- lfx/components/amazon/amazon_bedrock_embedding.py +109 -0
- lfx/components/amazon/amazon_bedrock_model.py +124 -0
- lfx/components/amazon/s3_bucket_uploader.py +211 -0
- lfx/components/anthropic/__init__.py +34 -0
- lfx/components/anthropic/anthropic.py +187 -0
- lfx/components/apify/__init__.py +5 -0
- lfx/components/apify/apify_actor.py +325 -0
- lfx/components/arxiv/__init__.py +3 -0
- lfx/components/arxiv/arxiv.py +163 -0
- lfx/components/assemblyai/__init__.py +46 -0
- lfx/components/assemblyai/assemblyai_get_subtitles.py +83 -0
- lfx/components/assemblyai/assemblyai_lemur.py +183 -0
- lfx/components/assemblyai/assemblyai_list_transcripts.py +95 -0
- lfx/components/assemblyai/assemblyai_poll_transcript.py +72 -0
- lfx/components/assemblyai/assemblyai_start_transcript.py +188 -0
- lfx/components/azure/__init__.py +37 -0
- lfx/components/azure/azure_openai.py +95 -0
- lfx/components/azure/azure_openai_embeddings.py +83 -0
- lfx/components/baidu/__init__.py +32 -0
- lfx/components/baidu/baidu_qianfan_chat.py +113 -0
- lfx/components/bing/__init__.py +3 -0
- lfx/components/bing/bing_search_api.py +61 -0
- lfx/components/cassandra/__init__.py +40 -0
- lfx/components/cassandra/cassandra.py +264 -0
- lfx/components/cassandra/cassandra_chat.py +92 -0
- lfx/components/cassandra/cassandra_graph.py +238 -0
- lfx/components/chains/__init__.py +3 -0
- lfx/components/chroma/__init__.py +34 -0
- lfx/components/chroma/chroma.py +167 -0
- lfx/components/cleanlab/__init__.py +40 -0
- lfx/components/cleanlab/cleanlab_evaluator.py +155 -0
- lfx/components/cleanlab/cleanlab_rag_evaluator.py +254 -0
- lfx/components/cleanlab/cleanlab_remediator.py +131 -0
- lfx/components/clickhouse/__init__.py +34 -0
- lfx/components/clickhouse/clickhouse.py +135 -0
- lfx/components/cloudflare/__init__.py +32 -0
- lfx/components/cloudflare/cloudflare.py +81 -0
- lfx/components/cohere/__init__.py +40 -0
- lfx/components/cohere/cohere_embeddings.py +81 -0
- lfx/components/cohere/cohere_models.py +46 -0
- lfx/components/cohere/cohere_rerank.py +51 -0
- lfx/components/composio/__init__.py +74 -0
- lfx/components/composio/composio_api.py +268 -0
- lfx/components/composio/dropbox_compnent.py +11 -0
- lfx/components/composio/github_composio.py +11 -0
- lfx/components/composio/gmail_composio.py +38 -0
- lfx/components/composio/googlecalendar_composio.py +11 -0
- lfx/components/composio/googlemeet_composio.py +11 -0
- lfx/components/composio/googletasks_composio.py +8 -0
- lfx/components/composio/linear_composio.py +11 -0
- lfx/components/composio/outlook_composio.py +11 -0
- lfx/components/composio/reddit_composio.py +11 -0
- lfx/components/composio/slack_composio.py +582 -0
- lfx/components/composio/slackbot_composio.py +11 -0
- lfx/components/composio/supabase_composio.py +11 -0
- lfx/components/composio/todoist_composio.py +11 -0
- lfx/components/composio/youtube_composio.py +11 -0
- lfx/components/confluence/__init__.py +3 -0
- lfx/components/confluence/confluence.py +84 -0
- lfx/components/couchbase/__init__.py +34 -0
- lfx/components/couchbase/couchbase.py +102 -0
- lfx/components/crewai/__init__.py +49 -0
- lfx/components/crewai/crewai.py +107 -0
- lfx/components/crewai/hierarchical_crew.py +46 -0
- lfx/components/crewai/hierarchical_task.py +44 -0
- lfx/components/crewai/sequential_crew.py +52 -0
- lfx/components/crewai/sequential_task.py +73 -0
- lfx/components/crewai/sequential_task_agent.py +143 -0
- lfx/components/custom_component/__init__.py +34 -0
- lfx/components/custom_component/custom_component.py +31 -0
- lfx/components/data/__init__.py +64 -0
- lfx/components/data/api_request.py +544 -0
- lfx/components/data/csv_to_data.py +95 -0
- lfx/components/data/directory.py +113 -0
- lfx/components/data/file.py +577 -0
- lfx/components/data/json_to_data.py +98 -0
- lfx/components/data/news_search.py +164 -0
- lfx/components/data/rss.py +69 -0
- lfx/components/data/sql_executor.py +101 -0
- lfx/components/data/url.py +311 -0
- lfx/components/data/web_search.py +112 -0
- lfx/components/data/webhook.py +56 -0
- lfx/components/datastax/__init__.py +70 -0
- lfx/components/datastax/astra_assistant_manager.py +306 -0
- lfx/components/datastax/astra_db.py +75 -0
- lfx/components/datastax/astra_vectorize.py +124 -0
- lfx/components/datastax/astradb.py +1285 -0
- lfx/components/datastax/astradb_cql.py +314 -0
- lfx/components/datastax/astradb_graph.py +330 -0
- lfx/components/datastax/astradb_tool.py +414 -0
- lfx/components/datastax/astradb_vectorstore.py +1285 -0
- lfx/components/datastax/cassandra.py +92 -0
- lfx/components/datastax/create_assistant.py +58 -0
- lfx/components/datastax/create_thread.py +32 -0
- lfx/components/datastax/dotenv.py +35 -0
- lfx/components/datastax/get_assistant.py +37 -0
- lfx/components/datastax/getenvvar.py +30 -0
- lfx/components/datastax/graph_rag.py +141 -0
- lfx/components/datastax/hcd.py +314 -0
- lfx/components/datastax/list_assistants.py +25 -0
- lfx/components/datastax/run.py +89 -0
- lfx/components/deactivated/__init__.py +15 -0
- lfx/components/deactivated/amazon_kendra.py +66 -0
- lfx/components/deactivated/chat_litellm_model.py +158 -0
- lfx/components/deactivated/code_block_extractor.py +26 -0
- lfx/components/deactivated/documents_to_data.py +22 -0
- lfx/components/deactivated/embed.py +16 -0
- lfx/components/deactivated/extract_key_from_data.py +46 -0
- lfx/components/deactivated/json_document_builder.py +57 -0
- lfx/components/deactivated/list_flows.py +20 -0
- lfx/components/deactivated/mcp_sse.py +61 -0
- lfx/components/deactivated/mcp_stdio.py +62 -0
- lfx/components/deactivated/merge_data.py +93 -0
- lfx/components/deactivated/message.py +37 -0
- lfx/components/deactivated/metal.py +54 -0
- lfx/components/deactivated/multi_query.py +59 -0
- lfx/components/deactivated/retriever.py +43 -0
- lfx/components/deactivated/selective_passthrough.py +77 -0
- lfx/components/deactivated/should_run_next.py +40 -0
- lfx/components/deactivated/split_text.py +63 -0
- lfx/components/deactivated/store_message.py +24 -0
- lfx/components/deactivated/sub_flow.py +124 -0
- lfx/components/deactivated/vectara_self_query.py +76 -0
- lfx/components/deactivated/vector_store.py +24 -0
- lfx/components/deepseek/__init__.py +34 -0
- lfx/components/deepseek/deepseek.py +136 -0
- lfx/components/docling/__init__.py +43 -0
- lfx/components/docling/chunk_docling_document.py +186 -0
- lfx/components/docling/docling_inline.py +231 -0
- lfx/components/docling/docling_remote.py +193 -0
- lfx/components/docling/export_docling_document.py +117 -0
- lfx/components/documentloaders/__init__.py +3 -0
- lfx/components/duckduckgo/__init__.py +3 -0
- lfx/components/duckduckgo/duck_duck_go_search_run.py +92 -0
- lfx/components/elastic/__init__.py +37 -0
- lfx/components/elastic/elasticsearch.py +267 -0
- lfx/components/elastic/opensearch.py +243 -0
- lfx/components/embeddings/__init__.py +37 -0
- lfx/components/embeddings/similarity.py +76 -0
- lfx/components/embeddings/text_embedder.py +64 -0
- lfx/components/exa/__init__.py +3 -0
- lfx/components/exa/exa_search.py +68 -0
- lfx/components/firecrawl/__init__.py +43 -0
- lfx/components/firecrawl/firecrawl_crawl_api.py +88 -0
- lfx/components/firecrawl/firecrawl_extract_api.py +136 -0
- lfx/components/firecrawl/firecrawl_map_api.py +89 -0
- lfx/components/firecrawl/firecrawl_scrape_api.py +73 -0
- lfx/components/git/__init__.py +4 -0
- lfx/components/git/git.py +262 -0
- lfx/components/git/gitextractor.py +196 -0
- lfx/components/glean/__init__.py +3 -0
- lfx/components/glean/glean_search_api.py +173 -0
- lfx/components/google/__init__.py +17 -0
- lfx/components/google/gmail.py +192 -0
- lfx/components/google/google_bq_sql_executor.py +157 -0
- lfx/components/google/google_drive.py +92 -0
- lfx/components/google/google_drive_search.py +152 -0
- lfx/components/google/google_generative_ai.py +147 -0
- lfx/components/google/google_generative_ai_embeddings.py +141 -0
- lfx/components/google/google_oauth_token.py +89 -0
- lfx/components/google/google_search_api_core.py +68 -0
- lfx/components/google/google_serper_api_core.py +74 -0
- lfx/components/groq/__init__.py +34 -0
- lfx/components/groq/groq.py +136 -0
- lfx/components/helpers/__init__.py +52 -0
- lfx/components/helpers/calculator_core.py +89 -0
- lfx/components/helpers/create_list.py +40 -0
- lfx/components/helpers/current_date.py +42 -0
- lfx/components/helpers/id_generator.py +42 -0
- lfx/components/helpers/memory.py +251 -0
- lfx/components/helpers/output_parser.py +45 -0
- lfx/components/helpers/store_message.py +90 -0
- lfx/components/homeassistant/__init__.py +7 -0
- lfx/components/homeassistant/home_assistant_control.py +152 -0
- lfx/components/homeassistant/list_home_assistant_states.py +137 -0
- lfx/components/huggingface/__init__.py +37 -0
- lfx/components/huggingface/huggingface.py +197 -0
- lfx/components/huggingface/huggingface_inference_api.py +106 -0
- lfx/components/ibm/__init__.py +34 -0
- lfx/components/ibm/watsonx.py +203 -0
- lfx/components/ibm/watsonx_embeddings.py +135 -0
- lfx/components/icosacomputing/__init__.py +5 -0
- lfx/components/icosacomputing/combinatorial_reasoner.py +84 -0
- lfx/components/input_output/__init__.py +38 -0
- lfx/components/input_output/chat.py +120 -0
- lfx/components/input_output/chat_output.py +200 -0
- lfx/components/input_output/text.py +27 -0
- lfx/components/input_output/text_output.py +29 -0
- lfx/components/jigsawstack/__init__.py +23 -0
- lfx/components/jigsawstack/ai_scrape.py +126 -0
- lfx/components/jigsawstack/ai_web_search.py +136 -0
- lfx/components/jigsawstack/file_read.py +115 -0
- lfx/components/jigsawstack/file_upload.py +94 -0
- lfx/components/jigsawstack/image_generation.py +205 -0
- lfx/components/jigsawstack/nsfw.py +60 -0
- lfx/components/jigsawstack/object_detection.py +124 -0
- lfx/components/jigsawstack/sentiment.py +112 -0
- lfx/components/jigsawstack/text_to_sql.py +90 -0
- lfx/components/jigsawstack/text_translate.py +77 -0
- lfx/components/jigsawstack/vocr.py +107 -0
- lfx/components/langchain_utilities/__init__.py +109 -0
- lfx/components/langchain_utilities/character.py +53 -0
- lfx/components/langchain_utilities/conversation.py +59 -0
- lfx/components/langchain_utilities/csv_agent.py +107 -0
- lfx/components/langchain_utilities/fake_embeddings.py +26 -0
- lfx/components/langchain_utilities/html_link_extractor.py +35 -0
- lfx/components/langchain_utilities/json_agent.py +45 -0
- lfx/components/langchain_utilities/langchain_hub.py +126 -0
- lfx/components/langchain_utilities/language_recursive.py +49 -0
- lfx/components/langchain_utilities/language_semantic.py +138 -0
- lfx/components/langchain_utilities/llm_checker.py +39 -0
- lfx/components/langchain_utilities/llm_math.py +42 -0
- lfx/components/langchain_utilities/natural_language.py +61 -0
- lfx/components/langchain_utilities/openai_tools.py +53 -0
- lfx/components/langchain_utilities/openapi.py +48 -0
- lfx/components/langchain_utilities/recursive_character.py +60 -0
- lfx/components/langchain_utilities/retrieval_qa.py +83 -0
- lfx/components/langchain_utilities/runnable_executor.py +137 -0
- lfx/components/langchain_utilities/self_query.py +80 -0
- lfx/components/langchain_utilities/spider.py +142 -0
- lfx/components/langchain_utilities/sql.py +40 -0
- lfx/components/langchain_utilities/sql_database.py +35 -0
- lfx/components/langchain_utilities/sql_generator.py +78 -0
- lfx/components/langchain_utilities/tool_calling.py +59 -0
- lfx/components/langchain_utilities/vector_store_info.py +49 -0
- lfx/components/langchain_utilities/vector_store_router.py +33 -0
- lfx/components/langchain_utilities/xml_agent.py +71 -0
- lfx/components/langwatch/__init__.py +3 -0
- lfx/components/langwatch/langwatch.py +278 -0
- lfx/components/link_extractors/__init__.py +3 -0
- lfx/components/lmstudio/__init__.py +34 -0
- lfx/components/lmstudio/lmstudioembeddings.py +89 -0
- lfx/components/lmstudio/lmstudiomodel.py +129 -0
- lfx/components/logic/__init__.py +52 -0
- lfx/components/logic/conditional_router.py +171 -0
- lfx/components/logic/data_conditional_router.py +125 -0
- lfx/components/logic/flow_tool.py +110 -0
- lfx/components/logic/listen.py +29 -0
- lfx/components/logic/loop.py +125 -0
- lfx/components/logic/notify.py +88 -0
- lfx/components/logic/pass_message.py +35 -0
- lfx/components/logic/run_flow.py +71 -0
- lfx/components/logic/sub_flow.py +114 -0
- lfx/components/maritalk/__init__.py +32 -0
- lfx/components/maritalk/maritalk.py +52 -0
- lfx/components/mem0/__init__.py +3 -0
- lfx/components/mem0/mem0_chat_memory.py +136 -0
- lfx/components/milvus/__init__.py +34 -0
- lfx/components/milvus/milvus.py +115 -0
- lfx/components/mistral/__init__.py +37 -0
- lfx/components/mistral/mistral.py +114 -0
- lfx/components/mistral/mistral_embeddings.py +58 -0
- lfx/components/models/__init__.py +34 -0
- lfx/components/models/embedding_model.py +114 -0
- lfx/components/models/language_model.py +144 -0
- lfx/components/mongodb/__init__.py +34 -0
- lfx/components/mongodb/mongodb_atlas.py +213 -0
- lfx/components/needle/__init__.py +3 -0
- lfx/components/needle/needle.py +104 -0
- lfx/components/notdiamond/__init__.py +34 -0
- lfx/components/notdiamond/notdiamond.py +228 -0
- lfx/components/novita/__init__.py +32 -0
- lfx/components/novita/novita.py +130 -0
- lfx/components/nvidia/__init__.py +57 -0
- lfx/components/nvidia/nvidia.py +157 -0
- lfx/components/nvidia/nvidia_embedding.py +77 -0
- lfx/components/nvidia/nvidia_ingest.py +317 -0
- lfx/components/nvidia/nvidia_rerank.py +63 -0
- lfx/components/nvidia/system_assist.py +65 -0
- lfx/components/olivya/__init__.py +3 -0
- lfx/components/olivya/olivya.py +116 -0
- lfx/components/ollama/__init__.py +37 -0
- lfx/components/ollama/ollama.py +330 -0
- lfx/components/ollama/ollama_embeddings.py +106 -0
- lfx/components/openai/__init__.py +37 -0
- lfx/components/openai/openai.py +100 -0
- lfx/components/openai/openai_chat_model.py +176 -0
- lfx/components/openrouter/__init__.py +32 -0
- lfx/components/openrouter/openrouter.py +202 -0
- lfx/components/output_parsers/__init__.py +3 -0
- lfx/components/perplexity/__init__.py +34 -0
- lfx/components/perplexity/perplexity.py +75 -0
- lfx/components/pgvector/__init__.py +34 -0
- lfx/components/pgvector/pgvector.py +72 -0
- lfx/components/pinecone/__init__.py +34 -0
- lfx/components/pinecone/pinecone.py +134 -0
- lfx/components/processing/__init__.py +117 -0
- lfx/components/processing/alter_metadata.py +108 -0
- lfx/components/processing/batch_run.py +205 -0
- lfx/components/processing/combine_text.py +39 -0
- lfx/components/processing/converter.py +159 -0
- lfx/components/processing/create_data.py +110 -0
- lfx/components/processing/data_operations.py +438 -0
- lfx/components/processing/data_to_dataframe.py +70 -0
- lfx/components/processing/dataframe_operations.py +313 -0
- lfx/components/processing/extract_key.py +53 -0
- lfx/components/processing/filter_data.py +42 -0
- lfx/components/processing/filter_data_values.py +88 -0
- lfx/components/processing/json_cleaner.py +103 -0
- lfx/components/processing/lambda_filter.py +154 -0
- lfx/components/processing/llm_router.py +499 -0
- lfx/components/processing/merge_data.py +90 -0
- lfx/components/processing/message_to_data.py +36 -0
- lfx/components/processing/parse_data.py +70 -0
- lfx/components/processing/parse_dataframe.py +68 -0
- lfx/components/processing/parse_json_data.py +90 -0
- lfx/components/processing/parser.py +143 -0
- lfx/components/processing/prompt.py +67 -0
- lfx/components/processing/python_repl_core.py +98 -0
- lfx/components/processing/regex.py +82 -0
- lfx/components/processing/save_file.py +225 -0
- lfx/components/processing/select_data.py +48 -0
- lfx/components/processing/split_text.py +141 -0
- lfx/components/processing/structured_output.py +202 -0
- lfx/components/processing/update_data.py +160 -0
- lfx/components/prototypes/__init__.py +34 -0
- lfx/components/prototypes/python_function.py +73 -0
- lfx/components/qdrant/__init__.py +34 -0
- lfx/components/qdrant/qdrant.py +109 -0
- lfx/components/redis/__init__.py +37 -0
- lfx/components/redis/redis.py +89 -0
- lfx/components/redis/redis_chat.py +43 -0
- lfx/components/sambanova/__init__.py +32 -0
- lfx/components/sambanova/sambanova.py +84 -0
- lfx/components/scrapegraph/__init__.py +40 -0
- lfx/components/scrapegraph/scrapegraph_markdownify_api.py +64 -0
- lfx/components/scrapegraph/scrapegraph_search_api.py +64 -0
- lfx/components/scrapegraph/scrapegraph_smart_scraper_api.py +71 -0
- lfx/components/searchapi/__init__.py +34 -0
- lfx/components/searchapi/search.py +79 -0
- lfx/components/serpapi/__init__.py +3 -0
- lfx/components/serpapi/serp.py +115 -0
- lfx/components/supabase/__init__.py +34 -0
- lfx/components/supabase/supabase.py +76 -0
- lfx/components/tavily/__init__.py +4 -0
- lfx/components/tavily/tavily_extract.py +117 -0
- lfx/components/tavily/tavily_search.py +212 -0
- lfx/components/textsplitters/__init__.py +3 -0
- lfx/components/toolkits/__init__.py +3 -0
- lfx/components/tools/__init__.py +72 -0
- lfx/components/tools/calculator.py +108 -0
- lfx/components/tools/google_search_api.py +45 -0
- lfx/components/tools/google_serper_api.py +115 -0
- lfx/components/tools/python_code_structured_tool.py +327 -0
- lfx/components/tools/python_repl.py +97 -0
- lfx/components/tools/search_api.py +87 -0
- lfx/components/tools/searxng.py +145 -0
- lfx/components/tools/serp_api.py +119 -0
- lfx/components/tools/tavily_search_tool.py +344 -0
- lfx/components/tools/wikidata_api.py +102 -0
- lfx/components/tools/wikipedia_api.py +49 -0
- lfx/components/tools/yahoo_finance.py +129 -0
- lfx/components/twelvelabs/__init__.py +52 -0
- lfx/components/twelvelabs/convert_astra_results.py +84 -0
- lfx/components/twelvelabs/pegasus_index.py +311 -0
- lfx/components/twelvelabs/split_video.py +291 -0
- lfx/components/twelvelabs/text_embeddings.py +57 -0
- lfx/components/twelvelabs/twelvelabs_pegasus.py +408 -0
- lfx/components/twelvelabs/video_embeddings.py +100 -0
- lfx/components/twelvelabs/video_file.py +179 -0
- lfx/components/unstructured/__init__.py +3 -0
- lfx/components/unstructured/unstructured.py +121 -0
- lfx/components/upstash/__init__.py +34 -0
- lfx/components/upstash/upstash.py +124 -0
- lfx/components/vectara/__init__.py +37 -0
- lfx/components/vectara/vectara.py +97 -0
- lfx/components/vectara/vectara_rag.py +164 -0
- lfx/components/vectorstores/__init__.py +40 -0
- lfx/components/vectorstores/astradb.py +1285 -0
- lfx/components/vectorstores/astradb_graph.py +319 -0
- lfx/components/vectorstores/cassandra.py +264 -0
- lfx/components/vectorstores/cassandra_graph.py +238 -0
- lfx/components/vectorstores/chroma.py +167 -0
- lfx/components/vectorstores/clickhouse.py +135 -0
- lfx/components/vectorstores/couchbase.py +102 -0
- lfx/components/vectorstores/elasticsearch.py +267 -0
- lfx/components/vectorstores/faiss.py +111 -0
- lfx/components/vectorstores/graph_rag.py +141 -0
- lfx/components/vectorstores/hcd.py +314 -0
- lfx/components/vectorstores/local_db.py +261 -0
- lfx/components/vectorstores/milvus.py +115 -0
- lfx/components/vectorstores/mongodb_atlas.py +213 -0
- lfx/components/vectorstores/opensearch.py +243 -0
- lfx/components/vectorstores/pgvector.py +72 -0
- lfx/components/vectorstores/pinecone.py +134 -0
- lfx/components/vectorstores/qdrant.py +109 -0
- lfx/components/vectorstores/supabase.py +76 -0
- lfx/components/vectorstores/upstash.py +124 -0
- lfx/components/vectorstores/vectara.py +97 -0
- lfx/components/vectorstores/vectara_rag.py +164 -0
- lfx/components/vectorstores/weaviate.py +89 -0
- lfx/components/vertexai/__init__.py +37 -0
- lfx/components/vertexai/vertexai.py +71 -0
- lfx/components/vertexai/vertexai_embeddings.py +67 -0
- lfx/components/weaviate/__init__.py +34 -0
- lfx/components/weaviate/weaviate.py +89 -0
- lfx/components/wikipedia/__init__.py +4 -0
- lfx/components/wikipedia/wikidata.py +86 -0
- lfx/components/wikipedia/wikipedia.py +53 -0
- lfx/components/wolframalpha/__init__.py +3 -0
- lfx/components/wolframalpha/wolfram_alpha_api.py +54 -0
- lfx/components/xai/__init__.py +32 -0
- lfx/components/xai/xai.py +167 -0
- lfx/components/yahoosearch/__init__.py +3 -0
- lfx/components/yahoosearch/yahoo.py +137 -0
- lfx/components/youtube/__init__.py +52 -0
- lfx/components/youtube/channel.py +227 -0
- lfx/components/youtube/comments.py +231 -0
- lfx/components/youtube/playlist.py +33 -0
- lfx/components/youtube/search.py +120 -0
- lfx/components/youtube/trending.py +285 -0
- lfx/components/youtube/video_details.py +263 -0
- lfx/components/youtube/youtube_transcripts.py +118 -0
- lfx/components/zep/__init__.py +3 -0
- lfx/components/zep/zep.py +44 -0
- lfx/constants.py +6 -0
- lfx/custom/__init__.py +7 -0
- lfx/custom/attributes.py +86 -0
- lfx/custom/code_parser/__init__.py +3 -0
- lfx/custom/code_parser/code_parser.py +361 -0
- lfx/custom/custom_component/__init__.py +0 -0
- lfx/custom/custom_component/base_component.py +128 -0
- lfx/custom/custom_component/component.py +1808 -0
- lfx/custom/custom_component/component_with_cache.py +8 -0
- lfx/custom/custom_component/custom_component.py +588 -0
- lfx/custom/dependency_analyzer.py +165 -0
- lfx/custom/directory_reader/__init__.py +3 -0
- lfx/custom/directory_reader/directory_reader.py +359 -0
- lfx/custom/directory_reader/utils.py +171 -0
- lfx/custom/eval.py +12 -0
- lfx/custom/schema.py +32 -0
- lfx/custom/tree_visitor.py +21 -0
- lfx/custom/utils.py +877 -0
- lfx/custom/validate.py +488 -0
- lfx/events/__init__.py +1 -0
- lfx/events/event_manager.py +110 -0
- lfx/exceptions/__init__.py +0 -0
- lfx/exceptions/component.py +15 -0
- lfx/field_typing/__init__.py +91 -0
- lfx/field_typing/constants.py +215 -0
- lfx/field_typing/range_spec.py +35 -0
- lfx/graph/__init__.py +6 -0
- lfx/graph/edge/__init__.py +0 -0
- lfx/graph/edge/base.py +277 -0
- lfx/graph/edge/schema.py +119 -0
- lfx/graph/edge/utils.py +0 -0
- lfx/graph/graph/__init__.py +0 -0
- lfx/graph/graph/ascii.py +202 -0
- lfx/graph/graph/base.py +2238 -0
- lfx/graph/graph/constants.py +63 -0
- lfx/graph/graph/runnable_vertices_manager.py +133 -0
- lfx/graph/graph/schema.py +52 -0
- lfx/graph/graph/state_model.py +66 -0
- lfx/graph/graph/utils.py +1024 -0
- lfx/graph/schema.py +75 -0
- lfx/graph/state/__init__.py +0 -0
- lfx/graph/state/model.py +237 -0
- lfx/graph/utils.py +200 -0
- lfx/graph/vertex/__init__.py +0 -0
- lfx/graph/vertex/base.py +823 -0
- lfx/graph/vertex/constants.py +0 -0
- lfx/graph/vertex/exceptions.py +4 -0
- lfx/graph/vertex/param_handler.py +264 -0
- lfx/graph/vertex/schema.py +26 -0
- lfx/graph/vertex/utils.py +19 -0
- lfx/graph/vertex/vertex_types.py +489 -0
- lfx/helpers/__init__.py +1 -0
- lfx/helpers/base_model.py +71 -0
- lfx/helpers/custom.py +13 -0
- lfx/helpers/data.py +167 -0
- lfx/helpers/flow.py +194 -0
- lfx/inputs/__init__.py +68 -0
- lfx/inputs/constants.py +2 -0
- lfx/inputs/input_mixin.py +328 -0
- lfx/inputs/inputs.py +714 -0
- lfx/inputs/validators.py +19 -0
- lfx/interface/__init__.py +6 -0
- lfx/interface/components.py +489 -0
- lfx/interface/importing/__init__.py +5 -0
- lfx/interface/importing/utils.py +39 -0
- lfx/interface/initialize/__init__.py +3 -0
- lfx/interface/initialize/loading.py +224 -0
- lfx/interface/listing.py +26 -0
- lfx/interface/run.py +16 -0
- lfx/interface/utils.py +111 -0
- lfx/io/__init__.py +63 -0
- lfx/io/schema.py +289 -0
- lfx/load/__init__.py +8 -0
- lfx/load/load.py +256 -0
- lfx/load/utils.py +99 -0
- lfx/log/__init__.py +5 -0
- lfx/log/logger.py +385 -0
- lfx/memory/__init__.py +90 -0
- lfx/memory/stubs.py +283 -0
- lfx/processing/__init__.py +1 -0
- lfx/processing/process.py +238 -0
- lfx/processing/utils.py +25 -0
- lfx/py.typed +0 -0
- lfx/schema/__init__.py +66 -0
- lfx/schema/artifact.py +83 -0
- lfx/schema/content_block.py +62 -0
- lfx/schema/content_types.py +91 -0
- lfx/schema/data.py +308 -0
- lfx/schema/dataframe.py +210 -0
- lfx/schema/dotdict.py +74 -0
- lfx/schema/encoders.py +13 -0
- lfx/schema/graph.py +47 -0
- lfx/schema/image.py +131 -0
- lfx/schema/json_schema.py +141 -0
- lfx/schema/log.py +61 -0
- lfx/schema/message.py +473 -0
- lfx/schema/openai_responses_schemas.py +74 -0
- lfx/schema/properties.py +41 -0
- lfx/schema/schema.py +171 -0
- lfx/schema/serialize.py +13 -0
- lfx/schema/table.py +140 -0
- lfx/schema/validators.py +114 -0
- lfx/serialization/__init__.py +5 -0
- lfx/serialization/constants.py +2 -0
- lfx/serialization/serialization.py +314 -0
- lfx/services/__init__.py +23 -0
- lfx/services/base.py +28 -0
- lfx/services/cache/__init__.py +6 -0
- lfx/services/cache/base.py +183 -0
- lfx/services/cache/service.py +166 -0
- lfx/services/cache/utils.py +169 -0
- lfx/services/chat/__init__.py +1 -0
- lfx/services/chat/config.py +2 -0
- lfx/services/chat/schema.py +10 -0
- lfx/services/deps.py +129 -0
- lfx/services/factory.py +19 -0
- lfx/services/initialize.py +19 -0
- lfx/services/interfaces.py +103 -0
- lfx/services/manager.py +172 -0
- lfx/services/schema.py +20 -0
- lfx/services/session.py +82 -0
- lfx/services/settings/__init__.py +3 -0
- lfx/services/settings/auth.py +130 -0
- lfx/services/settings/base.py +539 -0
- lfx/services/settings/constants.py +31 -0
- lfx/services/settings/factory.py +23 -0
- lfx/services/settings/feature_flags.py +12 -0
- lfx/services/settings/service.py +35 -0
- lfx/services/settings/utils.py +40 -0
- lfx/services/shared_component_cache/__init__.py +1 -0
- lfx/services/shared_component_cache/factory.py +30 -0
- lfx/services/shared_component_cache/service.py +9 -0
- lfx/services/storage/__init__.py +5 -0
- lfx/services/storage/local.py +155 -0
- lfx/services/storage/service.py +54 -0
- lfx/services/tracing/__init__.py +1 -0
- lfx/services/tracing/service.py +21 -0
- lfx/settings.py +6 -0
- lfx/template/__init__.py +6 -0
- lfx/template/field/__init__.py +0 -0
- lfx/template/field/base.py +257 -0
- lfx/template/field/prompt.py +15 -0
- lfx/template/frontend_node/__init__.py +6 -0
- lfx/template/frontend_node/base.py +212 -0
- lfx/template/frontend_node/constants.py +65 -0
- lfx/template/frontend_node/custom_components.py +79 -0
- lfx/template/template/__init__.py +0 -0
- lfx/template/template/base.py +100 -0
- lfx/template/utils.py +217 -0
- lfx/type_extraction/__init__.py +19 -0
- lfx/type_extraction/type_extraction.py +75 -0
- lfx/type_extraction.py +80 -0
- lfx/utils/__init__.py +1 -0
- lfx/utils/async_helpers.py +42 -0
- lfx/utils/component_utils.py +154 -0
- lfx/utils/concurrency.py +60 -0
- lfx/utils/connection_string_parser.py +11 -0
- lfx/utils/constants.py +205 -0
- lfx/utils/data_structure.py +212 -0
- lfx/utils/exceptions.py +22 -0
- lfx/utils/helpers.py +28 -0
- lfx/utils/image.py +73 -0
- lfx/utils/lazy_load.py +15 -0
- lfx/utils/request_utils.py +18 -0
- lfx/utils/schemas.py +139 -0
- lfx/utils/util.py +481 -0
- lfx/utils/util_strings.py +56 -0
- lfx/utils/version.py +24 -0
- lfx_nightly-0.1.11.dev0.dist-info/METADATA +293 -0
- lfx_nightly-0.1.11.dev0.dist-info/RECORD +699 -0
- lfx_nightly-0.1.11.dev0.dist-info/WHEEL +4 -0
- lfx_nightly-0.1.11.dev0.dist-info/entry_points.txt +2 -0
lfx/base/mcp/util.py
ADDED
@@ -0,0 +1,1398 @@
|
|
1
|
+
import asyncio
|
2
|
+
import contextlib
|
3
|
+
import inspect
|
4
|
+
import os
|
5
|
+
import platform
|
6
|
+
import re
|
7
|
+
import shutil
|
8
|
+
import unicodedata
|
9
|
+
from collections.abc import Awaitable, Callable
|
10
|
+
from typing import Any
|
11
|
+
from urllib.parse import urlparse
|
12
|
+
from uuid import UUID
|
13
|
+
|
14
|
+
import httpx
|
15
|
+
from anyio import ClosedResourceError
|
16
|
+
from httpx import codes as httpx_codes
|
17
|
+
from langchain_core.tools import StructuredTool
|
18
|
+
from mcp import ClientSession
|
19
|
+
from mcp.shared.exceptions import McpError
|
20
|
+
from pydantic import BaseModel
|
21
|
+
|
22
|
+
from lfx.log.logger import logger
|
23
|
+
from lfx.schema.json_schema import create_input_schema_from_json_schema
|
24
|
+
from lfx.services.deps import get_settings_service
|
25
|
+
|
26
|
+
HTTP_ERROR_STATUS_CODE = httpx_codes.BAD_REQUEST # HTTP status code for client errors
|
27
|
+
|
28
|
+
# HTTP status codes used in validation
|
29
|
+
HTTP_NOT_FOUND = 404
|
30
|
+
HTTP_BAD_REQUEST = 400
|
31
|
+
HTTP_INTERNAL_SERVER_ERROR = 500
|
32
|
+
|
33
|
+
# MCP Session Manager constants
|
34
|
+
settings = get_settings_service().settings
|
35
|
+
MAX_SESSIONS_PER_SERVER = (
|
36
|
+
settings.mcp_max_sessions_per_server
|
37
|
+
) # Maximum number of sessions per server to prevent resource exhaustion
|
38
|
+
SESSION_IDLE_TIMEOUT = settings.mcp_session_idle_timeout # 5 minutes idle timeout for sessions
|
39
|
+
SESSION_CLEANUP_INTERVAL = settings.mcp_session_cleanup_interval # Cleanup interval in seconds
|
40
|
+
# RFC 7230 compliant header name pattern: token = 1*tchar
|
41
|
+
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
|
42
|
+
# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
|
43
|
+
HEADER_NAME_PATTERN = re.compile(r"^[!#$%&\'*+\-.0-9A-Z^_`a-z|~]+$")
|
44
|
+
|
45
|
+
# Common allowed headers for MCP connections
|
46
|
+
ALLOWED_HEADERS = {
|
47
|
+
"authorization",
|
48
|
+
"accept",
|
49
|
+
"accept-encoding",
|
50
|
+
"accept-language",
|
51
|
+
"cache-control",
|
52
|
+
"content-type",
|
53
|
+
"user-agent",
|
54
|
+
"x-api-key",
|
55
|
+
"x-auth-token",
|
56
|
+
"x-custom-header",
|
57
|
+
"x-langflow-session",
|
58
|
+
"x-mcp-client",
|
59
|
+
"x-requested-with",
|
60
|
+
}
|
61
|
+
|
62
|
+
|
63
|
+
def validate_headers(headers: dict[str, str]) -> dict[str, str]:
|
64
|
+
"""Validate and sanitize HTTP headers according to RFC 7230.
|
65
|
+
|
66
|
+
Args:
|
67
|
+
headers: Dictionary of header name-value pairs
|
68
|
+
|
69
|
+
Returns:
|
70
|
+
Dictionary of validated and sanitized headers
|
71
|
+
|
72
|
+
Raises:
|
73
|
+
ValueError: If headers contain invalid names or values
|
74
|
+
"""
|
75
|
+
if not headers:
|
76
|
+
return {}
|
77
|
+
|
78
|
+
sanitized_headers = {}
|
79
|
+
|
80
|
+
for name, value in headers.items():
|
81
|
+
if not isinstance(name, str) or not isinstance(value, str):
|
82
|
+
logger.warning(f"Skipping non-string header: {name}={value}")
|
83
|
+
continue
|
84
|
+
|
85
|
+
# Validate header name according to RFC 7230
|
86
|
+
if not HEADER_NAME_PATTERN.match(name):
|
87
|
+
logger.warning(f"Invalid header name '{name}', skipping")
|
88
|
+
continue
|
89
|
+
|
90
|
+
# Normalize header name to lowercase (HTTP headers are case-insensitive)
|
91
|
+
normalized_name = name.lower()
|
92
|
+
|
93
|
+
# Optional: Check against whitelist of allowed headers
|
94
|
+
if normalized_name not in ALLOWED_HEADERS:
|
95
|
+
# For MCP, we'll be permissive and allow non-standard headers
|
96
|
+
# but log a warning for security awareness
|
97
|
+
logger.debug(f"Using non-standard header: {normalized_name}")
|
98
|
+
|
99
|
+
# Check for potential header injection attempts BEFORE sanitizing
|
100
|
+
if "\r" in value or "\n" in value:
|
101
|
+
logger.warning(f"Potential header injection detected in '{name}', skipping")
|
102
|
+
continue
|
103
|
+
|
104
|
+
# Sanitize header value - remove control characters and newlines
|
105
|
+
# RFC 7230: field-value = *( field-content / obs-fold )
|
106
|
+
# We'll remove control characters (0x00-0x1F, 0x7F) except tab (0x09) and space (0x20)
|
107
|
+
sanitized_value = re.sub(r"[\x00-\x08\x0A-\x1F\x7F]", "", value)
|
108
|
+
|
109
|
+
# Remove leading/trailing whitespace
|
110
|
+
sanitized_value = sanitized_value.strip()
|
111
|
+
|
112
|
+
if not sanitized_value:
|
113
|
+
logger.warning(f"Header '{name}' has empty value after sanitization, skipping")
|
114
|
+
continue
|
115
|
+
|
116
|
+
sanitized_headers[normalized_name] = sanitized_value
|
117
|
+
|
118
|
+
return sanitized_headers
|
119
|
+
|
120
|
+
|
121
|
+
def sanitize_mcp_name(name: str, max_length: int = 46) -> str:
|
122
|
+
"""Sanitize a name for MCP usage by removing emojis, diacritics, and special characters.
|
123
|
+
|
124
|
+
Args:
|
125
|
+
name: The original name to sanitize
|
126
|
+
max_length: Maximum length for the sanitized name
|
127
|
+
|
128
|
+
Returns:
|
129
|
+
A sanitized name containing only letters, numbers, hyphens, and underscores
|
130
|
+
"""
|
131
|
+
if not name or not name.strip():
|
132
|
+
return ""
|
133
|
+
|
134
|
+
# Remove emojis using regex pattern
|
135
|
+
emoji_pattern = re.compile(
|
136
|
+
"["
|
137
|
+
"\U0001f600-\U0001f64f" # emoticons
|
138
|
+
"\U0001f300-\U0001f5ff" # symbols & pictographs
|
139
|
+
"\U0001f680-\U0001f6ff" # transport & map symbols
|
140
|
+
"\U0001f1e0-\U0001f1ff" # flags (iOS)
|
141
|
+
"\U00002500-\U00002bef" # chinese char
|
142
|
+
"\U00002702-\U000027b0"
|
143
|
+
"\U00002702-\U000027b0"
|
144
|
+
"\U000024c2-\U0001f251"
|
145
|
+
"\U0001f926-\U0001f937"
|
146
|
+
"\U00010000-\U0010ffff"
|
147
|
+
"\u2640-\u2642"
|
148
|
+
"\u2600-\u2b55"
|
149
|
+
"\u200d"
|
150
|
+
"\u23cf"
|
151
|
+
"\u23e9"
|
152
|
+
"\u231a"
|
153
|
+
"\ufe0f" # dingbats
|
154
|
+
"\u3030"
|
155
|
+
"]+",
|
156
|
+
flags=re.UNICODE,
|
157
|
+
)
|
158
|
+
|
159
|
+
# Remove emojis
|
160
|
+
name = emoji_pattern.sub("", name)
|
161
|
+
|
162
|
+
# Normalize unicode characters to remove diacritics
|
163
|
+
name = unicodedata.normalize("NFD", name)
|
164
|
+
name = "".join(char for char in name if unicodedata.category(char) != "Mn")
|
165
|
+
|
166
|
+
# Replace spaces and special characters with underscores
|
167
|
+
name = re.sub(r"[^\w\s-]", "", name) # Keep only word chars, spaces, and hyphens
|
168
|
+
name = re.sub(r"[-\s]+", "_", name) # Replace spaces and hyphens with underscores
|
169
|
+
name = re.sub(r"_+", "_", name) # Collapse multiple underscores
|
170
|
+
|
171
|
+
# Remove leading/trailing underscores
|
172
|
+
name = name.strip("_")
|
173
|
+
|
174
|
+
# Ensure it starts with a letter or underscore (not a number)
|
175
|
+
if name and name[0].isdigit():
|
176
|
+
name = f"_{name}"
|
177
|
+
|
178
|
+
# Convert to lowercase
|
179
|
+
name = name.lower()
|
180
|
+
|
181
|
+
# Truncate to max length
|
182
|
+
if len(name) > max_length:
|
183
|
+
name = name[:max_length].rstrip("_")
|
184
|
+
|
185
|
+
# If empty after sanitization, provide a default
|
186
|
+
if not name:
|
187
|
+
name = "unnamed"
|
188
|
+
|
189
|
+
return name
|
190
|
+
|
191
|
+
|
192
|
+
def create_tool_coroutine(tool_name: str, arg_schema: type[BaseModel], client) -> Callable[..., Awaitable]:
|
193
|
+
async def tool_coroutine(*args, **kwargs):
|
194
|
+
# Get field names from the model (preserving order)
|
195
|
+
field_names = list(arg_schema.model_fields.keys())
|
196
|
+
provided_args = {}
|
197
|
+
# Map positional arguments to their corresponding field names
|
198
|
+
for i, arg in enumerate(args):
|
199
|
+
if i >= len(field_names):
|
200
|
+
msg = "Too many positional arguments provided"
|
201
|
+
raise ValueError(msg)
|
202
|
+
provided_args[field_names[i]] = arg
|
203
|
+
# Merge in keyword arguments
|
204
|
+
provided_args.update(kwargs)
|
205
|
+
# Validate input and fill defaults for missing optional fields
|
206
|
+
try:
|
207
|
+
validated = arg_schema.model_validate(provided_args)
|
208
|
+
except Exception as e:
|
209
|
+
msg = f"Invalid input: {e}"
|
210
|
+
raise ValueError(msg) from e
|
211
|
+
|
212
|
+
try:
|
213
|
+
return await client.run_tool(tool_name, arguments=validated.model_dump())
|
214
|
+
except Exception as e:
|
215
|
+
await logger.aerror(f"Tool '{tool_name}' execution failed: {e}")
|
216
|
+
# Re-raise with more context
|
217
|
+
msg = f"Tool '{tool_name}' execution failed: {e}"
|
218
|
+
raise ValueError(msg) from e
|
219
|
+
|
220
|
+
return tool_coroutine
|
221
|
+
|
222
|
+
|
223
|
+
def create_tool_func(tool_name: str, arg_schema: type[BaseModel], client) -> Callable[..., str]:
|
224
|
+
def tool_func(*args, **kwargs):
|
225
|
+
field_names = list(arg_schema.model_fields.keys())
|
226
|
+
provided_args = {}
|
227
|
+
for i, arg in enumerate(args):
|
228
|
+
if i >= len(field_names):
|
229
|
+
msg = "Too many positional arguments provided"
|
230
|
+
raise ValueError(msg)
|
231
|
+
provided_args[field_names[i]] = arg
|
232
|
+
provided_args.update(kwargs)
|
233
|
+
try:
|
234
|
+
validated = arg_schema.model_validate(provided_args)
|
235
|
+
except Exception as e:
|
236
|
+
msg = f"Invalid input: {e}"
|
237
|
+
raise ValueError(msg) from e
|
238
|
+
|
239
|
+
try:
|
240
|
+
loop = asyncio.get_event_loop()
|
241
|
+
return loop.run_until_complete(client.run_tool(tool_name, arguments=validated.model_dump()))
|
242
|
+
except Exception as e:
|
243
|
+
logger.error(f"Tool '{tool_name}' execution failed: {e}")
|
244
|
+
# Re-raise with more context
|
245
|
+
msg = f"Tool '{tool_name}' execution failed: {e}"
|
246
|
+
raise ValueError(msg) from e
|
247
|
+
|
248
|
+
return tool_func
|
249
|
+
|
250
|
+
|
251
|
+
def get_unique_name(base_name, max_length, existing_names):
|
252
|
+
name = base_name[:max_length]
|
253
|
+
if name not in existing_names:
|
254
|
+
return name
|
255
|
+
i = 1
|
256
|
+
while True:
|
257
|
+
suffix = f"_{i}"
|
258
|
+
truncated_base = base_name[: max_length - len(suffix)]
|
259
|
+
candidate = f"{truncated_base}{suffix}"
|
260
|
+
if candidate not in existing_names:
|
261
|
+
return candidate
|
262
|
+
i += 1
|
263
|
+
|
264
|
+
|
265
|
+
async def get_flow_snake_case(flow_name: str, user_id: str, session, *, is_action: bool | None = None):
|
266
|
+
try:
|
267
|
+
from langflow.services.database.models.flow.model import Flow
|
268
|
+
from sqlmodel import select
|
269
|
+
except ImportError as e:
|
270
|
+
msg = "Langflow Flow model is not available. This feature requires the full Langflow installation."
|
271
|
+
raise ImportError(msg) from e
|
272
|
+
|
273
|
+
uuid_user_id = UUID(user_id) if isinstance(user_id, str) else user_id
|
274
|
+
|
275
|
+
stmt = select(Flow).where(Flow.user_id == uuid_user_id).where(Flow.is_component == False) # noqa: E712
|
276
|
+
flows = (await session.exec(stmt)).all()
|
277
|
+
|
278
|
+
for flow in flows:
|
279
|
+
if is_action and flow.action_name:
|
280
|
+
this_flow_name = sanitize_mcp_name(flow.action_name)
|
281
|
+
else:
|
282
|
+
this_flow_name = sanitize_mcp_name(flow.name)
|
283
|
+
|
284
|
+
if this_flow_name == flow_name:
|
285
|
+
return flow
|
286
|
+
return None
|
287
|
+
|
288
|
+
|
289
|
+
def _is_valid_key_value_item(item: Any) -> bool:
|
290
|
+
"""Check if an item is a valid key-value dictionary."""
|
291
|
+
return isinstance(item, dict) and "key" in item and "value" in item
|
292
|
+
|
293
|
+
|
294
|
+
def _process_headers(headers: Any) -> dict:
|
295
|
+
"""Process the headers input into a valid dictionary.
|
296
|
+
|
297
|
+
Args:
|
298
|
+
headers: The headers to process, can be dict, str, or list
|
299
|
+
Returns:
|
300
|
+
Processed and validated dictionary
|
301
|
+
"""
|
302
|
+
if headers is None:
|
303
|
+
return {}
|
304
|
+
if isinstance(headers, dict):
|
305
|
+
return validate_headers(headers)
|
306
|
+
if isinstance(headers, list):
|
307
|
+
processed_headers = {}
|
308
|
+
try:
|
309
|
+
for item in headers:
|
310
|
+
if not _is_valid_key_value_item(item):
|
311
|
+
continue
|
312
|
+
key = item["key"]
|
313
|
+
value = item["value"]
|
314
|
+
processed_headers[key] = value
|
315
|
+
except (KeyError, TypeError, ValueError):
|
316
|
+
return {} # Return empty dictionary instead of None
|
317
|
+
return validate_headers(processed_headers)
|
318
|
+
return {}
|
319
|
+
|
320
|
+
|
321
|
+
def _validate_node_installation(command: str) -> str:
|
322
|
+
"""Validate the npx command."""
|
323
|
+
if "npx" in command and not shutil.which("node"):
|
324
|
+
msg = "Node.js is not installed. Please install Node.js to use npx commands."
|
325
|
+
raise ValueError(msg)
|
326
|
+
return command
|
327
|
+
|
328
|
+
|
329
|
+
async def _validate_connection_params(mode: str, command: str | None = None, url: str | None = None) -> None:
|
330
|
+
"""Validate connection parameters based on mode."""
|
331
|
+
if mode not in ["Stdio", "SSE"]:
|
332
|
+
msg = f"Invalid mode: {mode}. Must be either 'Stdio' or 'SSE'"
|
333
|
+
raise ValueError(msg)
|
334
|
+
|
335
|
+
if mode == "Stdio" and not command:
|
336
|
+
msg = "Command is required for Stdio mode"
|
337
|
+
raise ValueError(msg)
|
338
|
+
if mode == "Stdio" and command:
|
339
|
+
_validate_node_installation(command)
|
340
|
+
if mode == "SSE" and not url:
|
341
|
+
msg = "URL is required for SSE mode"
|
342
|
+
raise ValueError(msg)
|
343
|
+
|
344
|
+
|
345
|
+
class MCPSessionManager:
|
346
|
+
"""Manages persistent MCP sessions with proper context manager lifecycle.
|
347
|
+
|
348
|
+
Fixed version that addresses the memory leak issue by:
|
349
|
+
1. Session reuse based on server identity rather than unique context IDs
|
350
|
+
2. Maximum session limits per server to prevent resource exhaustion
|
351
|
+
3. Idle timeout for automatic session cleanup
|
352
|
+
4. Periodic cleanup of stale sessions
|
353
|
+
"""
|
354
|
+
|
355
|
+
def __init__(self):
|
356
|
+
# Structure: server_key -> {"sessions": {session_id: session_info}, "last_cleanup": timestamp}
|
357
|
+
self.sessions_by_server = {}
|
358
|
+
self._background_tasks = set() # Keep references to background tasks
|
359
|
+
# Backwards-compatibility maps: which context_id uses which (server_key, session_id)
|
360
|
+
self._context_to_session: dict[str, tuple[str, str]] = {}
|
361
|
+
# Reference count for each active (server_key, session_id)
|
362
|
+
self._session_refcount: dict[tuple[str, str], int] = {}
|
363
|
+
self._cleanup_task = None
|
364
|
+
self._start_cleanup_task()
|
365
|
+
|
366
|
+
def _start_cleanup_task(self):
|
367
|
+
"""Start the periodic cleanup task."""
|
368
|
+
if self._cleanup_task is None or self._cleanup_task.done():
|
369
|
+
self._cleanup_task = asyncio.create_task(self._periodic_cleanup())
|
370
|
+
self._background_tasks.add(self._cleanup_task)
|
371
|
+
self._cleanup_task.add_done_callback(self._background_tasks.discard)
|
372
|
+
|
373
|
+
async def _periodic_cleanup(self):
|
374
|
+
"""Periodically clean up idle sessions."""
|
375
|
+
while True:
|
376
|
+
try:
|
377
|
+
await asyncio.sleep(SESSION_CLEANUP_INTERVAL)
|
378
|
+
await self._cleanup_idle_sessions()
|
379
|
+
except asyncio.CancelledError:
|
380
|
+
break
|
381
|
+
except (RuntimeError, KeyError, ClosedResourceError, ValueError, asyncio.TimeoutError) as e:
|
382
|
+
# Handle common recoverable errors without stopping the cleanup loop
|
383
|
+
await logger.awarning(f"Error in periodic cleanup: {e}")
|
384
|
+
|
385
|
+
async def _cleanup_idle_sessions(self):
|
386
|
+
"""Clean up sessions that have been idle for too long."""
|
387
|
+
current_time = asyncio.get_event_loop().time()
|
388
|
+
servers_to_remove = []
|
389
|
+
|
390
|
+
for server_key, server_data in self.sessions_by_server.items():
|
391
|
+
sessions = server_data.get("sessions", {})
|
392
|
+
sessions_to_remove = []
|
393
|
+
|
394
|
+
for session_id, session_info in sessions.items():
|
395
|
+
if current_time - session_info["last_used"] > SESSION_IDLE_TIMEOUT:
|
396
|
+
sessions_to_remove.append(session_id)
|
397
|
+
|
398
|
+
# Clean up idle sessions
|
399
|
+
for session_id in sessions_to_remove:
|
400
|
+
await logger.ainfo(f"Cleaning up idle session {session_id} for server {server_key}")
|
401
|
+
await self._cleanup_session_by_id(server_key, session_id)
|
402
|
+
|
403
|
+
# Remove server entry if no sessions left
|
404
|
+
if not sessions:
|
405
|
+
servers_to_remove.append(server_key)
|
406
|
+
|
407
|
+
# Clean up empty server entries
|
408
|
+
for server_key in servers_to_remove:
|
409
|
+
del self.sessions_by_server[server_key]
|
410
|
+
|
411
|
+
def _get_server_key(self, connection_params, transport_type: str) -> str:
|
412
|
+
"""Generate a consistent server key based on connection parameters."""
|
413
|
+
if transport_type == "stdio":
|
414
|
+
if hasattr(connection_params, "command"):
|
415
|
+
# Include command, args, and environment for uniqueness
|
416
|
+
command_str = f"{connection_params.command} {' '.join(connection_params.args or [])}"
|
417
|
+
env_str = str(sorted((connection_params.env or {}).items()))
|
418
|
+
key_input = f"{command_str}|{env_str}"
|
419
|
+
return f"stdio_{hash(key_input)}"
|
420
|
+
elif transport_type == "sse" and (isinstance(connection_params, dict) and "url" in connection_params):
|
421
|
+
# Include URL and headers for uniqueness
|
422
|
+
url = connection_params["url"]
|
423
|
+
headers = str(sorted((connection_params.get("headers", {})).items()))
|
424
|
+
key_input = f"{url}|{headers}"
|
425
|
+
return f"sse_{hash(key_input)}"
|
426
|
+
|
427
|
+
# Fallback to a generic key
|
428
|
+
# TODO: add option for streamable HTTP in future.
|
429
|
+
return f"{transport_type}_{hash(str(connection_params))}"
|
430
|
+
|
431
|
+
async def _validate_session_connectivity(self, session) -> bool:
|
432
|
+
"""Validate that the session is actually usable by testing a simple operation."""
|
433
|
+
try:
|
434
|
+
# Try to list tools as a connectivity test (this is a lightweight operation)
|
435
|
+
# Use a shorter timeout for the connectivity test to fail fast
|
436
|
+
response = await asyncio.wait_for(session.list_tools(), timeout=3.0)
|
437
|
+
except (asyncio.TimeoutError, ConnectionError, OSError, ValueError) as e:
|
438
|
+
await logger.adebug(f"Session connectivity test failed (standard error): {e}")
|
439
|
+
return False
|
440
|
+
except Exception as e:
|
441
|
+
# Handle MCP-specific errors that might not be in the standard list
|
442
|
+
error_str = str(e)
|
443
|
+
if (
|
444
|
+
"ClosedResourceError" in str(type(e))
|
445
|
+
or "Connection closed" in error_str
|
446
|
+
or "Connection lost" in error_str
|
447
|
+
or "Connection failed" in error_str
|
448
|
+
or "Transport closed" in error_str
|
449
|
+
or "Stream closed" in error_str
|
450
|
+
):
|
451
|
+
await logger.adebug(f"Session connectivity test failed (MCP connection error): {e}")
|
452
|
+
return False
|
453
|
+
# Re-raise unexpected errors
|
454
|
+
await logger.awarning(f"Unexpected error in connectivity test: {e}")
|
455
|
+
raise
|
456
|
+
else:
|
457
|
+
# Validate that we got a meaningful response
|
458
|
+
if response is None:
|
459
|
+
await logger.adebug("Session connectivity test failed: received None response")
|
460
|
+
return False
|
461
|
+
try:
|
462
|
+
# Check if we can access the tools list (even if empty)
|
463
|
+
tools = getattr(response, "tools", None)
|
464
|
+
if tools is None:
|
465
|
+
await logger.adebug("Session connectivity test failed: no tools attribute in response")
|
466
|
+
return False
|
467
|
+
except (AttributeError, TypeError) as e:
|
468
|
+
await logger.adebug(f"Session connectivity test failed while validating response: {e}")
|
469
|
+
return False
|
470
|
+
else:
|
471
|
+
await logger.adebug(f"Session connectivity test passed: found {len(tools)} tools")
|
472
|
+
return True
|
473
|
+
|
474
|
+
async def get_session(self, context_id: str, connection_params, transport_type: str):
|
475
|
+
"""Get or create a session with improved reuse strategy.
|
476
|
+
|
477
|
+
The key insight is that we should reuse sessions based on the server
|
478
|
+
identity (command + args for stdio, URL for SSE) rather than the context_id.
|
479
|
+
This prevents creating a new subprocess for each unique context.
|
480
|
+
"""
|
481
|
+
server_key = self._get_server_key(connection_params, transport_type)
|
482
|
+
|
483
|
+
# Ensure server entry exists
|
484
|
+
if server_key not in self.sessions_by_server:
|
485
|
+
self.sessions_by_server[server_key] = {"sessions": {}, "last_cleanup": asyncio.get_event_loop().time()}
|
486
|
+
|
487
|
+
server_data = self.sessions_by_server[server_key]
|
488
|
+
sessions = server_data["sessions"]
|
489
|
+
|
490
|
+
# Try to find a healthy existing session
|
491
|
+
for session_id, session_info in sessions.items():
|
492
|
+
session = session_info["session"]
|
493
|
+
task = session_info["task"]
|
494
|
+
|
495
|
+
# Check if session is still alive
|
496
|
+
if not task.done():
|
497
|
+
# Update last used time
|
498
|
+
session_info["last_used"] = asyncio.get_event_loop().time()
|
499
|
+
|
500
|
+
# Quick health check
|
501
|
+
if await self._validate_session_connectivity(session):
|
502
|
+
await logger.adebug(f"Reusing existing session {session_id} for server {server_key}")
|
503
|
+
# record mapping & bump ref-count for backwards compatibility
|
504
|
+
self._context_to_session[context_id] = (server_key, session_id)
|
505
|
+
self._session_refcount[(server_key, session_id)] = (
|
506
|
+
self._session_refcount.get((server_key, session_id), 0) + 1
|
507
|
+
)
|
508
|
+
return session
|
509
|
+
await logger.ainfo(f"Session {session_id} for server {server_key} failed health check, cleaning up")
|
510
|
+
await self._cleanup_session_by_id(server_key, session_id)
|
511
|
+
else:
|
512
|
+
# Task is done, clean up
|
513
|
+
await logger.ainfo(f"Session {session_id} for server {server_key} task is done, cleaning up")
|
514
|
+
await self._cleanup_session_by_id(server_key, session_id)
|
515
|
+
|
516
|
+
# Check if we've reached the maximum number of sessions for this server
|
517
|
+
if len(sessions) >= MAX_SESSIONS_PER_SERVER:
|
518
|
+
# Remove the oldest session
|
519
|
+
oldest_session_id = min(sessions.keys(), key=lambda x: sessions[x]["last_used"])
|
520
|
+
await logger.ainfo(
|
521
|
+
f"Maximum sessions reached for server {server_key}, removing oldest session {oldest_session_id}"
|
522
|
+
)
|
523
|
+
await self._cleanup_session_by_id(server_key, oldest_session_id)
|
524
|
+
|
525
|
+
# Create new session
|
526
|
+
session_id = f"{server_key}_{len(sessions)}"
|
527
|
+
await logger.ainfo(f"Creating new session {session_id} for server {server_key}")
|
528
|
+
|
529
|
+
if transport_type == "stdio":
|
530
|
+
session, task = await self._create_stdio_session(session_id, connection_params)
|
531
|
+
elif transport_type == "sse":
|
532
|
+
session, task = await self._create_sse_session(session_id, connection_params)
|
533
|
+
else:
|
534
|
+
msg = f"Unknown transport type: {transport_type}"
|
535
|
+
raise ValueError(msg)
|
536
|
+
|
537
|
+
# Store session info
|
538
|
+
sessions[session_id] = {
|
539
|
+
"session": session,
|
540
|
+
"task": task,
|
541
|
+
"type": transport_type,
|
542
|
+
"last_used": asyncio.get_event_loop().time(),
|
543
|
+
}
|
544
|
+
|
545
|
+
# register mapping & initial ref-count for the new session
|
546
|
+
self._context_to_session[context_id] = (server_key, session_id)
|
547
|
+
self._session_refcount[(server_key, session_id)] = 1
|
548
|
+
|
549
|
+
return session
|
550
|
+
|
551
|
+
    async def _create_stdio_session(self, session_id: str, connection_params):
        """Create a new stdio session as a background task to avoid context issues."""
        import asyncio

        from mcp.client.stdio import stdio_client

        # Create a future to get the session
        session_future: asyncio.Future[ClientSession] = asyncio.Future()

        async def session_task():
            """Background task that keeps the session alive."""
            try:
                async with stdio_client(connection_params) as (read, write):
                    session = ClientSession(read, write)
                    async with session:
                        await session.initialize()
                        # Signal that session is ready
                        session_future.set_result(session)

                        # Keep the session alive until cancelled
                        import anyio

                        event = anyio.Event()
                        try:
                            await event.wait()
                        except asyncio.CancelledError:
                            await logger.ainfo(f"Session {session_id} is shutting down")
            except Exception as e:  # noqa: BLE001
                if not session_future.done():
                    session_future.set_exception(e)

        # Start the background task
        task = asyncio.create_task(session_task())
        self._background_tasks.add(task)
        task.add_done_callback(self._background_tasks.discard)

        # Wait for session to be ready
        try:
            session = await asyncio.wait_for(session_future, timeout=10.0)
        except asyncio.TimeoutError as timeout_err:
            # Clean up the failed task
            if not task.done():
                task.cancel()
                import contextlib

                with contextlib.suppress(asyncio.CancelledError):
                    await task
            self._background_tasks.discard(task)
            msg = f"Timeout waiting for STDIO session {session_id} to initialize"
            await logger.aerror(msg)
            raise ValueError(msg) from timeout_err

        return session, task

    async def _create_sse_session(self, session_id: str, connection_params):
        """Create a new SSE session as a background task to avoid context issues."""
        import asyncio

        from mcp.client.sse import sse_client

        # Create a future to get the session
        session_future: asyncio.Future[ClientSession] = asyncio.Future()

        async def session_task():
            """Background task that keeps the session alive."""
            try:
                async with sse_client(
                    connection_params["url"],
                    connection_params["headers"],
                    connection_params["timeout_seconds"],
                    connection_params["sse_read_timeout_seconds"],
                ) as (read, write):
                    session = ClientSession(read, write)
                    async with session:
                        await session.initialize()
                        # Signal that session is ready
                        session_future.set_result(session)

                        # Keep the session alive until cancelled
                        import anyio

                        event = anyio.Event()
                        try:
                            await event.wait()
                        except asyncio.CancelledError:
                            await logger.ainfo(f"Session {session_id} is shutting down")
            except Exception as e:  # noqa: BLE001
                if not session_future.done():
                    session_future.set_exception(e)

        # Start the background task
        task = asyncio.create_task(session_task())
        self._background_tasks.add(task)
        task.add_done_callback(self._background_tasks.discard)

        # Wait for session to be ready
        try:
            session = await asyncio.wait_for(session_future, timeout=10.0)
        except asyncio.TimeoutError as timeout_err:
            # Clean up the failed task
            if not task.done():
                task.cancel()
                import contextlib

                with contextlib.suppress(asyncio.CancelledError):
                    await task
            self._background_tasks.discard(task)
            msg = f"Timeout waiting for SSE session {session_id} to initialize"
            await logger.aerror(msg)
            raise ValueError(msg) from timeout_err

        return session, task

    async def _cleanup_session_by_id(self, server_key: str, session_id: str):
        """Clean up a specific session by server key and session ID."""
        if server_key not in self.sessions_by_server:
            return

        server_data = self.sessions_by_server[server_key]
        # Handle both old and new session structure
        if isinstance(server_data, dict) and "sessions" in server_data:
            sessions = server_data["sessions"]
        else:
            # Handle old structure where sessions were stored directly
            sessions = server_data

        if session_id not in sessions:
            return

        session_info = sessions[session_id]
        try:
            # First try to properly close the session if it exists
            if "session" in session_info:
                session = session_info["session"]

                # Try async close first (aclose method)
                if hasattr(session, "aclose"):
                    try:
                        await session.aclose()
                        await logger.adebug("Successfully closed session %s using aclose()", session_id)
                    except Exception as e:  # noqa: BLE001
                        await logger.adebug("Error closing session %s with aclose(): %s", session_id, e)

                # If no aclose, try regular close method
                elif hasattr(session, "close"):
                    try:
                        # Check if close() is awaitable using inspection
                        if inspect.iscoroutinefunction(session.close):
                            # It's an async method
                            await session.close()
                            await logger.adebug("Successfully closed session %s using async close()", session_id)
                        else:
                            # Try calling it and check if result is awaitable
                            close_result = session.close()
                            if inspect.isawaitable(close_result):
                                await close_result
                                await logger.adebug(
                                    "Successfully closed session %s using awaitable close()", session_id
                                )
                            else:
                                # It's a synchronous close
                                await logger.adebug("Successfully closed session %s using sync close()", session_id)
                    except Exception as e:  # noqa: BLE001
                        await logger.adebug("Error closing session %s with close(): %s", session_id, e)

            # Cancel the background task which will properly close the session
            if "task" in session_info:
                task = session_info["task"]
                if not task.done():
                    task.cancel()
                    try:
                        await task
                    except asyncio.CancelledError:
                        await logger.ainfo(f"Cancelled task for session {session_id}")
        except Exception as e:  # noqa: BLE001
            await logger.awarning(f"Error cleaning up session {session_id}: {e}")
        finally:
            # Remove from sessions dict
            del sessions[session_id]

    async def cleanup_all(self):
        """Clean up all sessions."""
        # Cancel periodic cleanup task
        if self._cleanup_task and not self._cleanup_task.done():
            self._cleanup_task.cancel()
            with contextlib.suppress(asyncio.CancelledError):
                await self._cleanup_task

        # Clean up all sessions
        for server_key in list(self.sessions_by_server.keys()):
            server_data = self.sessions_by_server[server_key]
            # Handle both old and new session structure
            if isinstance(server_data, dict) and "sessions" in server_data:
                sessions = server_data["sessions"]
            else:
                # Handle old structure where sessions were stored directly
                sessions = server_data

            for session_id in list(sessions.keys()):
                await self._cleanup_session_by_id(server_key, session_id)

        # Clear the sessions_by_server structure completely
        self.sessions_by_server.clear()

        # Clear compatibility maps
        self._context_to_session.clear()
        self._session_refcount.clear()

        # Clear all background tasks
        for task in list(self._background_tasks):
            if not task.done():
                task.cancel()
                with contextlib.suppress(asyncio.CancelledError):
                    await task

        # Give a bit more time for subprocess transports to clean up
        # This helps prevent the BaseSubprocessTransport.__del__ warnings
        await asyncio.sleep(0.5)

    async def _cleanup_session(self, context_id: str):
        """Backward-compat cleanup by context_id.

        Decrements the ref-count for the session used by *context_id* and only
        tears the session down when the last context that references it goes
        away.
        """
        mapping = self._context_to_session.get(context_id)
        if not mapping:
            await logger.adebug(f"No session mapping found for context_id {context_id}")
            return

        server_key, session_id = mapping
        ref_key = (server_key, session_id)
        remaining = self._session_refcount.get(ref_key, 1) - 1

        if remaining <= 0:
            await self._cleanup_session_by_id(server_key, session_id)
            self._session_refcount.pop(ref_key, None)
        else:
            self._session_refcount[ref_key] = remaining

        # Remove the mapping for this context
        self._context_to_session.pop(context_id, None)


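# Illustrative sketch, not part of this module: _create_stdio_session and
# _create_sse_session above share one pattern -- the transport is opened inside a
# dedicated background task, the ready ClientSession is handed back through an
# asyncio.Future, and the task then parks on an event until it is cancelled, which
# unwinds the async context managers and closes the transport. The coroutine below
# demonstrates the same handshake with a dummy resource; every name in it is
# hypothetical and exists only for illustration.
import asyncio
import contextlib


async def _session_keepalive_sketch() -> None:
    ready: asyncio.Future[str] = asyncio.Future()

    async def keep_alive() -> None:
        resource = "dummy-session"  # stands in for a ClientSession over stdio/SSE
        try:
            ready.set_result(resource)  # signal the caller that the resource is usable
            await asyncio.Event().wait()  # parked here until the task is cancelled
        except asyncio.CancelledError:
            pass  # cancellation is the normal shutdown path

    task = asyncio.create_task(keep_alive())
    resource = await asyncio.wait_for(ready, timeout=10.0)
    assert resource == "dummy-session"
    # Tearing down mirrors _cleanup_session_by_id: cancel the task and await it.
    task.cancel()
    with contextlib.suppress(asyncio.CancelledError):
        await task

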
class MCPStdioClient:
    def __init__(self, component_cache=None):
        self.session: ClientSession | None = None
        self._connection_params = None
        self._connected = False
        self._session_context: str | None = None
        self._component_cache = component_cache

    async def _connect_to_server(self, command_str: str, env: dict[str, str] | None = None) -> list[StructuredTool]:
        """Connect to MCP server using stdio transport (SDK style)."""
        from mcp import StdioServerParameters

        command = command_str.split(" ")
        env_data: dict[str, str] = {"DEBUG": "true", "PATH": os.environ["PATH"], **(env or {})}

        if platform.system() == "Windows":
            server_params = StdioServerParameters(
                command="cmd",
                args=[
                    "/c",
                    f"{command[0]} {' '.join(command[1:])} || echo Command failed with exit code %errorlevel% 1>&2",
                ],
                env=env_data,
            )
        else:
            server_params = StdioServerParameters(
                command="bash",
                args=["-c", f"exec {command_str} || echo 'Command failed with exit code $?' >&2"],
                env=env_data,
            )

        # Store connection parameters for later use in run_tool
        self._connection_params = server_params

        # If no session context is set, create a default one
        if not self._session_context:
            # Generate a fallback context based on connection parameters
            import uuid

            param_hash = uuid.uuid4().hex[:8]
            self._session_context = f"default_{param_hash}"

        # Get or create a persistent session
        session = await self._get_or_create_session()
        response = await session.list_tools()
        self._connected = True
        return response.tools

    async def connect_to_server(self, command_str: str, env: dict[str, str] | None = None) -> list[StructuredTool]:
        """Connect to MCP server using stdio transport (SDK style)."""
        return await asyncio.wait_for(
            self._connect_to_server(command_str, env), timeout=get_settings_service().settings.mcp_server_timeout
        )

    def set_session_context(self, context_id: str):
        """Set the session context (e.g., flow_id + user_id + session_id)."""
        self._session_context = context_id

    def _get_session_manager(self) -> MCPSessionManager:
        """Get or create session manager from component cache."""
        if not self._component_cache:
            # Fallback to instance-level session manager if no cache
            if not hasattr(self, "_session_manager"):
                self._session_manager = MCPSessionManager()
            return self._session_manager

        from lfx.services.cache.utils import CacheMiss

        session_manager = self._component_cache.get("mcp_session_manager")
        if isinstance(session_manager, CacheMiss):
            session_manager = MCPSessionManager()
            self._component_cache.set("mcp_session_manager", session_manager)
        return session_manager

    async def _get_or_create_session(self) -> ClientSession:
        """Get or create a persistent session for the current context."""
        if not self._session_context or not self._connection_params:
            msg = "Session context and connection params must be set"
            raise ValueError(msg)

        # Use cached session manager to get/create persistent session
        session_manager = self._get_session_manager()
        return await session_manager.get_session(self._session_context, self._connection_params, "stdio")

    async def run_tool(self, tool_name: str, arguments: dict[str, Any]) -> Any:
        """Run a tool with the given arguments using context-specific session.

        Args:
            tool_name: Name of the tool to run
            arguments: Dictionary of arguments to pass to the tool

        Returns:
            The result of the tool execution

        Raises:
            ValueError: If session is not initialized or tool execution fails
        """
        if not self._connected or not self._connection_params:
            msg = "Session not initialized or disconnected. Call connect_to_server first."
            raise ValueError(msg)

        # If no session context is set, create a default one
        if not self._session_context:
            # Generate a fallback context based on connection parameters
            import uuid

            param_hash = uuid.uuid4().hex[:8]
            self._session_context = f"default_{param_hash}"

        max_retries = 2
        last_error_type = None

        for attempt in range(max_retries):
            try:
                await logger.adebug(f"Attempting to run tool '{tool_name}' (attempt {attempt + 1}/{max_retries})")
                # Get or create persistent session
                session = await self._get_or_create_session()

                result = await asyncio.wait_for(
                    session.call_tool(tool_name, arguments=arguments),
                    timeout=30.0,  # 30 second timeout
                )
            except Exception as e:
                current_error_type = type(e).__name__
                await logger.awarning(f"Tool '{tool_name}' failed on attempt {attempt + 1}: {current_error_type} - {e}")

                # Import specific MCP error types for detection
                try:
                    is_closed_resource_error = isinstance(e, ClosedResourceError)
                    is_mcp_connection_error = isinstance(e, McpError) and "Connection closed" in str(e)
                except ImportError:
                    is_closed_resource_error = "ClosedResourceError" in str(type(e))
                    is_mcp_connection_error = "Connection closed" in str(e)

                # Detect timeout errors
                is_timeout_error = isinstance(e, asyncio.TimeoutError | TimeoutError)

                # If we're getting the same error type repeatedly, don't retry
                if last_error_type == current_error_type and attempt > 0:
                    await logger.aerror(f"Repeated {current_error_type} error for tool '{tool_name}', not retrying")
                    break

                last_error_type = current_error_type

                # If it's a connection error (ClosedResourceError or MCP connection closed) and we have retries left
                if (is_closed_resource_error or is_mcp_connection_error) and attempt < max_retries - 1:
                    await logger.awarning(
                        f"MCP session connection issue for tool '{tool_name}', retrying with fresh session..."
                    )
                    # Clean up the dead session
                    if self._session_context:
                        session_manager = self._get_session_manager()
                        await session_manager._cleanup_session(self._session_context)
                    # Add a small delay before retry
                    await asyncio.sleep(0.5)
                    continue

                # If it's a timeout error and we have retries left, try once more
                if is_timeout_error and attempt < max_retries - 1:
                    await logger.awarning(f"Tool '{tool_name}' timed out, retrying...")
                    # Don't clean up session for timeouts, might just be a slow response
                    await asyncio.sleep(1.0)
                    continue

                # For other errors or no retries left, handle as before
                if (
                    isinstance(e, ConnectionError | TimeoutError | OSError | ValueError)
                    or is_closed_resource_error
                    or is_mcp_connection_error
                    or is_timeout_error
                ):
                    msg = f"Failed to run tool '{tool_name}' after {attempt + 1} attempts: {e}"
                    await logger.aerror(msg)
                    # Clean up failed session from cache
                    if self._session_context and self._component_cache:
                        cache_key = f"mcp_session_stdio_{self._session_context}"
                        self._component_cache.delete(cache_key)
                    self._connected = False
                    raise ValueError(msg) from e
                # Re-raise unexpected errors
                raise
            else:
                await logger.adebug(f"Tool '{tool_name}' completed successfully")
                return result

        # This should never be reached due to the exception handling above
        msg = f"Failed to run tool '{tool_name}': Maximum retries exceeded with repeated {last_error_type} errors"
        await logger.aerror(msg)
        raise ValueError(msg)

    async def disconnect(self):
        """Properly close the connection and clean up resources."""
        # For stdio transport, there is no remote session to terminate explicitly
        # The session cleanup happens when the background task is cancelled

        # Clean up local session using the session manager
        if self._session_context:
            session_manager = self._get_session_manager()
            await session_manager._cleanup_session(self._session_context)

        # Reset local state
        self.session = None
        self._connection_params = None
        self._connected = False
        self._session_context = None

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        await self.disconnect()


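# Illustrative usage sketch for MCPStdioClient; the server command, environment
# variable, context id, and tool arguments below are hypothetical placeholders,
# not part of this module or of any particular MCP server.
import asyncio


async def _stdio_usage_sketch() -> None:
    async with MCPStdioClient() as client:
        # Optional: scope the pooled session to a caller-defined context;
        # otherwise a random default context is generated on connect.
        client.set_session_context("flow-123_user-1")
        tools = await client.connect_to_server("uvx some-mcp-server", env={"EXAMPLE_API_KEY": "..."})
        print([tool.name for tool in tools])
        if tools:
            result = await client.run_tool(tools[0].name, arguments={})
            print(result)
    # Leaving the async-with block calls disconnect(), which releases this
    # context's reference to the pooled session.


# asyncio.run(_stdio_usage_sketch())

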
class MCPSseClient:
    def __init__(self, component_cache=None):
        self.session: ClientSession | None = None
        self._connection_params = None
        self._connected = False
        self._session_context: str | None = None
        self._component_cache = component_cache

    def _get_session_manager(self) -> MCPSessionManager:
        """Get or create session manager from component cache."""
        if not self._component_cache:
            # Fallback to instance-level session manager if no cache
            if not hasattr(self, "_session_manager"):
                self._session_manager = MCPSessionManager()
            return self._session_manager

        from lfx.services.cache.utils import CacheMiss

        session_manager = self._component_cache.get("mcp_session_manager")
        if isinstance(session_manager, CacheMiss):
            session_manager = MCPSessionManager()
            self._component_cache.set("mcp_session_manager", session_manager)
        return session_manager

    async def validate_url(self, url: str | None, headers: dict[str, str] | None = None) -> tuple[bool, str]:
        """Validate the SSE URL before attempting connection."""
        try:
            parsed = urlparse(url)
            if not parsed.scheme or not parsed.netloc:
                return False, "Invalid URL format. Must include scheme (http/https) and host."

            async with httpx.AsyncClient() as client:
                try:
                    # For SSE endpoints, try a GET request with short timeout
                    # Many SSE servers don't support HEAD requests and return 404
                    response = await client.get(
                        url, timeout=2.0, headers={"Accept": "text/event-stream", **(headers or {})}
                    )

                    # For SSE, we expect the server to either:
                    # 1. Start streaming (200)
                    # 2. Return 404 if HEAD/GET without proper SSE handshake is not supported
                    # 3. Return other status codes that we should handle gracefully

                    # Don't fail on 404 since many SSE endpoints return this for non-SSE requests
                    if response.status_code == HTTP_NOT_FOUND:
                        # This is likely an SSE endpoint that doesn't support regular GET
                        # Let the actual SSE connection attempt handle this
                        return True, ""

                    # Fail on client errors except 404, but allow server errors and redirects
                    if (
                        HTTP_BAD_REQUEST <= response.status_code < HTTP_INTERNAL_SERVER_ERROR
                        and response.status_code != HTTP_NOT_FOUND
                    ):
                        return False, f"Server returned client error status: {response.status_code}"

                except httpx.TimeoutException:
                    # Timeout on a short request might indicate the server is trying to stream
                    # This is actually expected behavior for SSE endpoints
                    return True, ""
                except httpx.NetworkError:
                    return False, "Network error. Could not reach the server."
                else:
                    return True, ""

        except (httpx.HTTPError, ValueError, OSError) as e:
            return False, f"URL validation error: {e!s}"

    async def pre_check_redirect(self, url: str | None, headers: dict[str, str] | None = None) -> str | None:
        """Check for redirects and return the final URL."""
        if url is None:
            return url
        try:
            async with httpx.AsyncClient(follow_redirects=False) as client:
                # Use GET with SSE headers instead of HEAD since many SSE servers don't support HEAD
                response = await client.get(
                    url, timeout=2.0, headers={"Accept": "text/event-stream", **(headers or {})}
                )
                if response.status_code == httpx.codes.TEMPORARY_REDIRECT:
                    return response.headers.get("Location", url)
                # Don't treat 404 as an error here - let the main connection handle it
        except (httpx.RequestError, httpx.HTTPError) as e:
            await logger.awarning(f"Error checking redirects: {e}")
        return url

    async def _connect_to_server(
        self,
        url: str | None,
        headers: dict[str, str] | None = None,
        timeout_seconds: int = 30,
        sse_read_timeout_seconds: int = 30,
    ) -> list[StructuredTool]:
        """Connect to MCP server using SSE transport (SDK style)."""
        # Validate and sanitize headers early
        validated_headers = _process_headers(headers)

        if url is None:
            msg = "URL is required for SSE mode"
            raise ValueError(msg)
        is_valid, error_msg = await self.validate_url(url, validated_headers)
        if not is_valid:
            msg = f"Invalid SSE URL ({url}): {error_msg}"
            raise ValueError(msg)

        url = await self.pre_check_redirect(url, validated_headers)

        # Store connection parameters for later use in run_tool
        self._connection_params = {
            "url": url,
            "headers": validated_headers,
            "timeout_seconds": timeout_seconds,
            "sse_read_timeout_seconds": sse_read_timeout_seconds,
        }

        # If no session context is set, create a default one
        if not self._session_context:
            # Generate a fallback context based on connection parameters
            import uuid

            param_hash = uuid.uuid4().hex[:8]
            self._session_context = f"default_sse_{param_hash}"

        # Get or create a persistent session
        session = await self._get_or_create_session()
        response = await session.list_tools()
        self._connected = True
        return response.tools

    async def connect_to_server(self, url: str, headers: dict[str, str] | None = None) -> list[StructuredTool]:
        """Connect to MCP server using SSE transport (SDK style)."""
        return await asyncio.wait_for(
            self._connect_to_server(url, headers), timeout=get_settings_service().settings.mcp_server_timeout
        )

    def set_session_context(self, context_id: str):
        """Set the session context (e.g., flow_id + user_id + session_id)."""
        self._session_context = context_id

    async def _get_or_create_session(self) -> ClientSession:
        """Get or create a persistent session for the current context."""
        if not self._session_context or not self._connection_params:
            msg = "Session context and params must be set"
            raise ValueError(msg)

        # Use cached session manager to get/create persistent session
        session_manager = self._get_session_manager()
        # Cache session so we can access server-assigned session_id later for DELETE
        self.session = await session_manager.get_session(self._session_context, self._connection_params, "sse")
        return self.session

    async def _terminate_remote_session(self) -> None:
        """Attempt to explicitly terminate the remote MCP session via HTTP DELETE (best-effort)."""
        # Only relevant for SSE transport
        if not self._connection_params or "url" not in self._connection_params:
            return

        url: str = self._connection_params["url"]

        # Retrieve session id from the underlying SDK if exposed
        session_id = None
        if getattr(self, "session", None) is not None:
            # Common attributes in MCP python SDK: `session_id` or `id`
            session_id = getattr(self.session, "session_id", None) or getattr(self.session, "id", None)

        headers: dict[str, str] = dict(self._connection_params.get("headers", {}))
        if session_id:
            headers["Mcp-Session-Id"] = str(session_id)

        try:
            async with httpx.AsyncClient(timeout=5.0) as client:
                await client.delete(url, headers=headers)
        except Exception as e:  # noqa: BLE001
            # DELETE is advisory; log and continue
            logger.debug(f"Unable to send session DELETE to '{url}': {e}")

    async def run_tool(self, tool_name: str, arguments: dict[str, Any]) -> Any:
        """Run a tool with the given arguments using context-specific session.

        Args:
            tool_name: Name of the tool to run
            arguments: Dictionary of arguments to pass to the tool

        Returns:
            The result of the tool execution

        Raises:
            ValueError: If session is not initialized or tool execution fails
        """
        if not self._connected or not self._connection_params:
            msg = "Session not initialized or disconnected. Call connect_to_server first."
            raise ValueError(msg)

        # If no session context is set, create a default one
        if not self._session_context:
            # Generate a fallback context based on connection parameters
            import uuid

            param_hash = uuid.uuid4().hex[:8]
            self._session_context = f"default_sse_{param_hash}"

        max_retries = 2
        last_error_type = None

        for attempt in range(max_retries):
            try:
                await logger.adebug(f"Attempting to run tool '{tool_name}' (attempt {attempt + 1}/{max_retries})")
                # Get or create persistent session
                session = await self._get_or_create_session()

                # Add timeout to prevent hanging
                import asyncio

                result = await asyncio.wait_for(
                    session.call_tool(tool_name, arguments=arguments),
                    timeout=30.0,  # 30 second timeout
                )
            except Exception as e:
                current_error_type = type(e).__name__
                await logger.awarning(f"Tool '{tool_name}' failed on attempt {attempt + 1}: {current_error_type} - {e}")

                # Import specific MCP error types for detection
                try:
                    from anyio import ClosedResourceError
                    from mcp.shared.exceptions import McpError

                    is_closed_resource_error = isinstance(e, ClosedResourceError)
                    is_mcp_connection_error = isinstance(e, McpError) and "Connection closed" in str(e)
                except ImportError:
                    is_closed_resource_error = "ClosedResourceError" in str(type(e))
                    is_mcp_connection_error = "Connection closed" in str(e)

                # Detect timeout errors
                is_timeout_error = isinstance(e, asyncio.TimeoutError | TimeoutError)

                # If we're getting the same error type repeatedly, don't retry
                if last_error_type == current_error_type and attempt > 0:
                    await logger.aerror(f"Repeated {current_error_type} error for tool '{tool_name}', not retrying")
                    break

                last_error_type = current_error_type

                # If it's a connection error (ClosedResourceError or MCP connection closed) and we have retries left
                if (is_closed_resource_error or is_mcp_connection_error) and attempt < max_retries - 1:
                    await logger.awarning(
                        f"MCP session connection issue for tool '{tool_name}', retrying with fresh session..."
                    )
                    # Clean up the dead session
                    if self._session_context:
                        session_manager = self._get_session_manager()
                        await session_manager._cleanup_session(self._session_context)
                    # Add a small delay before retry
                    await asyncio.sleep(0.5)
                    continue

                # If it's a timeout error and we have retries left, try once more
                if is_timeout_error and attempt < max_retries - 1:
                    await logger.awarning(f"Tool '{tool_name}' timed out, retrying...")
                    # Don't clean up session for timeouts, might just be a slow response
                    await asyncio.sleep(1.0)
                    continue

                # For other errors or no retries left, handle as before
                if (
                    isinstance(e, ConnectionError | TimeoutError | OSError | ValueError)
                    or is_closed_resource_error
                    or is_mcp_connection_error
                    or is_timeout_error
                ):
                    msg = f"Failed to run tool '{tool_name}' after {attempt + 1} attempts: {e}"
                    await logger.aerror(msg)
                    # Clean up failed session from cache
                    if self._session_context and self._component_cache:
                        cache_key = f"mcp_session_sse_{self._session_context}"
                        self._component_cache.delete(cache_key)
                    self._connected = False
                    raise ValueError(msg) from e
                # Re-raise unexpected errors
                raise
            else:
                await logger.adebug(f"Tool '{tool_name}' completed successfully")
                return result

        # This should never be reached due to the exception handling above
        msg = f"Failed to run tool '{tool_name}': Maximum retries exceeded with repeated {last_error_type} errors"
        await logger.aerror(msg)
        raise ValueError(msg)

    async def disconnect(self):
        """Properly close the connection and clean up resources."""
        # Attempt best-effort remote session termination first
        await self._terminate_remote_session()

        # Clean up local session using the session manager
        if self._session_context:
            session_manager = self._get_session_manager()
            await session_manager._cleanup_session(self._session_context)

        # Reset local state
        self.session = None
        self._connection_params = None
        self._connected = False
        self._session_context = None

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        await self.disconnect()


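# Illustrative usage sketch for MCPSseClient; the URL, header, context id, and tool
# arguments below are hypothetical placeholders, not part of this module or of any
# particular MCP server.
import asyncio


async def _sse_usage_sketch() -> None:
    async with MCPSseClient() as client:
        client.set_session_context("flow-123_user-1")
        tools = await client.connect_to_server(
            "https://example.com/mcp/sse", headers={"Authorization": "Bearer <token>"}
        )
        print([tool.name for tool in tools])
        if tools:
            result = await client.run_tool(tools[0].name, arguments={})
            print(result)
    # Leaving the async-with block calls disconnect(), which also attempts a
    # best-effort HTTP DELETE against the server before releasing the session.


# asyncio.run(_sse_usage_sketch())

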
async def update_tools(
    server_name: str,
    server_config: dict,
    mcp_stdio_client: MCPStdioClient | None = None,
    mcp_sse_client: MCPSseClient | None = None,
) -> tuple[str, list[StructuredTool], dict[str, StructuredTool]]:
    """Fetch server config and update available tools."""
    if server_config is None:
        server_config = {}
    if not server_name:
        return "", [], {}
    if mcp_stdio_client is None:
        mcp_stdio_client = MCPStdioClient()
    if mcp_sse_client is None:
        mcp_sse_client = MCPSseClient()

    # Fetch server config from backend
    mode = "Stdio" if "command" in server_config else "SSE" if "url" in server_config else ""
    command = server_config.get("command", "")
    url = server_config.get("url", "")
    tools = []
    headers = _process_headers(server_config.get("headers", {}))

    try:
        await _validate_connection_params(mode, command, url)
    except ValueError as e:
        logger.error(f"Invalid MCP server configuration for '{server_name}': {e}")
        raise

    # Determine connection type and parameters
    client: MCPStdioClient | MCPSseClient | None = None
    if mode == "Stdio":
        # Stdio connection
        args = server_config.get("args", [])
        env = server_config.get("env", {})
        full_command = " ".join([command, *args])
        tools = await mcp_stdio_client.connect_to_server(full_command, env)
        client = mcp_stdio_client
    elif mode == "SSE":
        # SSE connection
        tools = await mcp_sse_client.connect_to_server(url, headers=headers)
        client = mcp_sse_client
    else:
        logger.error(f"Invalid MCP server mode for '{server_name}': {mode}")
        return "", [], {}

    if not tools or not client or not client._connected:
        logger.warning(f"No tools available from MCP server '{server_name}' or connection failed")
        return "", [], {}

    tool_list = []
    tool_cache: dict[str, StructuredTool] = {}
    for tool in tools:
        if not tool or not hasattr(tool, "name"):
            continue
        try:
            args_schema = create_input_schema_from_json_schema(tool.inputSchema)
            if not args_schema:
                logger.warning(f"Could not create schema for tool '{tool.name}' from server '{server_name}'")
                continue

            tool_obj = StructuredTool(
                name=tool.name,
                description=tool.description or "",
                args_schema=args_schema,
                func=create_tool_func(tool.name, args_schema, client),
                coroutine=create_tool_coroutine(tool.name, args_schema, client),
                tags=[tool.name],
                metadata={"server_name": server_name},
            )
            tool_list.append(tool_obj)
            tool_cache[tool.name] = tool_obj
        except (ConnectionError, TimeoutError, OSError, ValueError) as e:
            logger.error(f"Failed to create tool '{tool.name}' from server '{server_name}': {e}")
            msg = f"Failed to create tool '{tool.name}' from server '{server_name}': {e}"
            raise ValueError(msg) from e

    logger.info(f"Successfully loaded {len(tool_list)} tools from MCP server '{server_name}'")
    return mode, tool_list, tool_cache
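

# Illustrative usage sketch for update_tools; the server name and config below are
# hypothetical placeholders (a stdio-style entry using the "command"/"args"/"env"
# keys read by the function above), not part of this module.
import asyncio


async def _update_tools_sketch() -> None:
    server_config = {
        "command": "uvx",
        "args": ["some-mcp-server"],
        "env": {"EXAMPLE_API_KEY": "..."},
    }
    mode, tool_list, tool_cache = await update_tools("example-server", server_config)
    print(mode)  # "Stdio" for a command-based config, "SSE" for a url-based one
    print([t.name for t in tool_list])  # StructuredTool wrappers ready to hand to an agent
    print(sorted(tool_cache))  # the same tools, keyed by tool name


# asyncio.run(_update_tools_sketch())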