lfx-nightly 0.2.0.dev25__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of lfx-nightly might be problematic. Click here for more details.
- lfx/__init__.py +0 -0
- lfx/__main__.py +25 -0
- lfx/_assets/component_index.json +1 -0
- lfx/base/__init__.py +0 -0
- lfx/base/agents/__init__.py +0 -0
- lfx/base/agents/agent.py +375 -0
- lfx/base/agents/altk_base_agent.py +380 -0
- lfx/base/agents/altk_tool_wrappers.py +565 -0
- lfx/base/agents/callback.py +130 -0
- lfx/base/agents/context.py +109 -0
- lfx/base/agents/crewai/__init__.py +0 -0
- lfx/base/agents/crewai/crew.py +231 -0
- lfx/base/agents/crewai/tasks.py +12 -0
- lfx/base/agents/default_prompts.py +23 -0
- lfx/base/agents/errors.py +15 -0
- lfx/base/agents/events.py +430 -0
- lfx/base/agents/utils.py +237 -0
- lfx/base/astra_assistants/__init__.py +0 -0
- lfx/base/astra_assistants/util.py +171 -0
- lfx/base/chains/__init__.py +0 -0
- lfx/base/chains/model.py +19 -0
- lfx/base/composio/__init__.py +0 -0
- lfx/base/composio/composio_base.py +2584 -0
- lfx/base/compressors/__init__.py +0 -0
- lfx/base/compressors/model.py +60 -0
- lfx/base/constants.py +46 -0
- lfx/base/curl/__init__.py +0 -0
- lfx/base/curl/parse.py +188 -0
- lfx/base/data/__init__.py +5 -0
- lfx/base/data/base_file.py +810 -0
- lfx/base/data/docling_utils.py +338 -0
- lfx/base/data/storage_utils.py +192 -0
- lfx/base/data/utils.py +362 -0
- lfx/base/datastax/__init__.py +5 -0
- lfx/base/datastax/astradb_base.py +896 -0
- lfx/base/document_transformers/__init__.py +0 -0
- lfx/base/document_transformers/model.py +43 -0
- lfx/base/embeddings/__init__.py +0 -0
- lfx/base/embeddings/aiml_embeddings.py +62 -0
- lfx/base/embeddings/embeddings_class.py +113 -0
- lfx/base/embeddings/model.py +26 -0
- lfx/base/flow_processing/__init__.py +0 -0
- lfx/base/flow_processing/utils.py +86 -0
- lfx/base/huggingface/__init__.py +0 -0
- lfx/base/huggingface/model_bridge.py +133 -0
- lfx/base/io/__init__.py +0 -0
- lfx/base/io/chat.py +21 -0
- lfx/base/io/text.py +22 -0
- lfx/base/knowledge_bases/__init__.py +3 -0
- lfx/base/knowledge_bases/knowledge_base_utils.py +137 -0
- lfx/base/langchain_utilities/__init__.py +0 -0
- lfx/base/langchain_utilities/model.py +35 -0
- lfx/base/langchain_utilities/spider_constants.py +1 -0
- lfx/base/langwatch/__init__.py +0 -0
- lfx/base/langwatch/utils.py +18 -0
- lfx/base/mcp/__init__.py +0 -0
- lfx/base/mcp/constants.py +2 -0
- lfx/base/mcp/util.py +1659 -0
- lfx/base/memory/__init__.py +0 -0
- lfx/base/memory/memory.py +49 -0
- lfx/base/memory/model.py +38 -0
- lfx/base/models/__init__.py +3 -0
- lfx/base/models/aiml_constants.py +51 -0
- lfx/base/models/anthropic_constants.py +51 -0
- lfx/base/models/aws_constants.py +151 -0
- lfx/base/models/chat_result.py +76 -0
- lfx/base/models/cometapi_constants.py +54 -0
- lfx/base/models/google_generative_ai_constants.py +70 -0
- lfx/base/models/google_generative_ai_model.py +38 -0
- lfx/base/models/groq_constants.py +150 -0
- lfx/base/models/groq_model_discovery.py +265 -0
- lfx/base/models/model.py +375 -0
- lfx/base/models/model_input_constants.py +378 -0
- lfx/base/models/model_metadata.py +41 -0
- lfx/base/models/model_utils.py +108 -0
- lfx/base/models/novita_constants.py +35 -0
- lfx/base/models/ollama_constants.py +52 -0
- lfx/base/models/openai_constants.py +129 -0
- lfx/base/models/sambanova_constants.py +18 -0
- lfx/base/models/watsonx_constants.py +36 -0
- lfx/base/processing/__init__.py +0 -0
- lfx/base/prompts/__init__.py +0 -0
- lfx/base/prompts/api_utils.py +224 -0
- lfx/base/prompts/utils.py +61 -0
- lfx/base/textsplitters/__init__.py +0 -0
- lfx/base/textsplitters/model.py +28 -0
- lfx/base/tools/__init__.py +0 -0
- lfx/base/tools/base.py +26 -0
- lfx/base/tools/component_tool.py +325 -0
- lfx/base/tools/constants.py +49 -0
- lfx/base/tools/flow_tool.py +132 -0
- lfx/base/tools/run_flow.py +698 -0
- lfx/base/vectorstores/__init__.py +0 -0
- lfx/base/vectorstores/model.py +193 -0
- lfx/base/vectorstores/utils.py +22 -0
- lfx/base/vectorstores/vector_store_connection_decorator.py +52 -0
- lfx/cli/__init__.py +5 -0
- lfx/cli/commands.py +327 -0
- lfx/cli/common.py +650 -0
- lfx/cli/run.py +506 -0
- lfx/cli/script_loader.py +289 -0
- lfx/cli/serve_app.py +546 -0
- lfx/cli/validation.py +69 -0
- lfx/components/FAISS/__init__.py +34 -0
- lfx/components/FAISS/faiss.py +111 -0
- lfx/components/Notion/__init__.py +19 -0
- lfx/components/Notion/add_content_to_page.py +269 -0
- lfx/components/Notion/create_page.py +94 -0
- lfx/components/Notion/list_database_properties.py +68 -0
- lfx/components/Notion/list_pages.py +122 -0
- lfx/components/Notion/list_users.py +77 -0
- lfx/components/Notion/page_content_viewer.py +93 -0
- lfx/components/Notion/search.py +111 -0
- lfx/components/Notion/update_page_property.py +114 -0
- lfx/components/__init__.py +428 -0
- lfx/components/_importing.py +42 -0
- lfx/components/agentql/__init__.py +3 -0
- lfx/components/agentql/agentql_api.py +151 -0
- lfx/components/aiml/__init__.py +37 -0
- lfx/components/aiml/aiml.py +115 -0
- lfx/components/aiml/aiml_embeddings.py +37 -0
- lfx/components/altk/__init__.py +34 -0
- lfx/components/altk/altk_agent.py +193 -0
- lfx/components/amazon/__init__.py +36 -0
- lfx/components/amazon/amazon_bedrock_converse.py +195 -0
- lfx/components/amazon/amazon_bedrock_embedding.py +109 -0
- lfx/components/amazon/amazon_bedrock_model.py +130 -0
- lfx/components/amazon/s3_bucket_uploader.py +211 -0
- lfx/components/anthropic/__init__.py +34 -0
- lfx/components/anthropic/anthropic.py +187 -0
- lfx/components/apify/__init__.py +5 -0
- lfx/components/apify/apify_actor.py +325 -0
- lfx/components/arxiv/__init__.py +3 -0
- lfx/components/arxiv/arxiv.py +169 -0
- lfx/components/assemblyai/__init__.py +46 -0
- lfx/components/assemblyai/assemblyai_get_subtitles.py +83 -0
- lfx/components/assemblyai/assemblyai_lemur.py +183 -0
- lfx/components/assemblyai/assemblyai_list_transcripts.py +95 -0
- lfx/components/assemblyai/assemblyai_poll_transcript.py +72 -0
- lfx/components/assemblyai/assemblyai_start_transcript.py +188 -0
- lfx/components/azure/__init__.py +37 -0
- lfx/components/azure/azure_openai.py +95 -0
- lfx/components/azure/azure_openai_embeddings.py +83 -0
- lfx/components/baidu/__init__.py +32 -0
- lfx/components/baidu/baidu_qianfan_chat.py +113 -0
- lfx/components/bing/__init__.py +3 -0
- lfx/components/bing/bing_search_api.py +61 -0
- lfx/components/cassandra/__init__.py +40 -0
- lfx/components/cassandra/cassandra.py +264 -0
- lfx/components/cassandra/cassandra_chat.py +92 -0
- lfx/components/cassandra/cassandra_graph.py +238 -0
- lfx/components/chains/__init__.py +3 -0
- lfx/components/chroma/__init__.py +34 -0
- lfx/components/chroma/chroma.py +169 -0
- lfx/components/cleanlab/__init__.py +40 -0
- lfx/components/cleanlab/cleanlab_evaluator.py +155 -0
- lfx/components/cleanlab/cleanlab_rag_evaluator.py +254 -0
- lfx/components/cleanlab/cleanlab_remediator.py +131 -0
- lfx/components/clickhouse/__init__.py +34 -0
- lfx/components/clickhouse/clickhouse.py +135 -0
- lfx/components/cloudflare/__init__.py +32 -0
- lfx/components/cloudflare/cloudflare.py +81 -0
- lfx/components/cohere/__init__.py +40 -0
- lfx/components/cohere/cohere_embeddings.py +81 -0
- lfx/components/cohere/cohere_models.py +46 -0
- lfx/components/cohere/cohere_rerank.py +51 -0
- lfx/components/cometapi/__init__.py +32 -0
- lfx/components/cometapi/cometapi.py +166 -0
- lfx/components/composio/__init__.py +222 -0
- lfx/components/composio/agentql_composio.py +11 -0
- lfx/components/composio/agiled_composio.py +11 -0
- lfx/components/composio/airtable_composio.py +11 -0
- lfx/components/composio/apollo_composio.py +11 -0
- lfx/components/composio/asana_composio.py +11 -0
- lfx/components/composio/attio_composio.py +11 -0
- lfx/components/composio/bitbucket_composio.py +11 -0
- lfx/components/composio/bolna_composio.py +11 -0
- lfx/components/composio/brightdata_composio.py +11 -0
- lfx/components/composio/calendly_composio.py +11 -0
- lfx/components/composio/canva_composio.py +11 -0
- lfx/components/composio/canvas_composio.py +11 -0
- lfx/components/composio/coda_composio.py +11 -0
- lfx/components/composio/composio_api.py +278 -0
- lfx/components/composio/contentful_composio.py +11 -0
- lfx/components/composio/digicert_composio.py +11 -0
- lfx/components/composio/discord_composio.py +11 -0
- lfx/components/composio/dropbox_compnent.py +11 -0
- lfx/components/composio/elevenlabs_composio.py +11 -0
- lfx/components/composio/exa_composio.py +11 -0
- lfx/components/composio/figma_composio.py +11 -0
- lfx/components/composio/finage_composio.py +11 -0
- lfx/components/composio/firecrawl_composio.py +11 -0
- lfx/components/composio/fireflies_composio.py +11 -0
- lfx/components/composio/fixer_composio.py +11 -0
- lfx/components/composio/flexisign_composio.py +11 -0
- lfx/components/composio/freshdesk_composio.py +11 -0
- lfx/components/composio/github_composio.py +11 -0
- lfx/components/composio/gmail_composio.py +38 -0
- lfx/components/composio/googlebigquery_composio.py +11 -0
- lfx/components/composio/googlecalendar_composio.py +11 -0
- lfx/components/composio/googleclassroom_composio.py +11 -0
- lfx/components/composio/googledocs_composio.py +11 -0
- lfx/components/composio/googlemeet_composio.py +11 -0
- lfx/components/composio/googlesheets_composio.py +11 -0
- lfx/components/composio/googletasks_composio.py +8 -0
- lfx/components/composio/heygen_composio.py +11 -0
- lfx/components/composio/instagram_composio.py +11 -0
- lfx/components/composio/jira_composio.py +11 -0
- lfx/components/composio/jotform_composio.py +11 -0
- lfx/components/composio/klaviyo_composio.py +11 -0
- lfx/components/composio/linear_composio.py +11 -0
- lfx/components/composio/listennotes_composio.py +11 -0
- lfx/components/composio/mem0_composio.py +11 -0
- lfx/components/composio/miro_composio.py +11 -0
- lfx/components/composio/missive_composio.py +11 -0
- lfx/components/composio/notion_composio.py +11 -0
- lfx/components/composio/onedrive_composio.py +11 -0
- lfx/components/composio/outlook_composio.py +11 -0
- lfx/components/composio/pandadoc_composio.py +11 -0
- lfx/components/composio/peopledatalabs_composio.py +11 -0
- lfx/components/composio/perplexityai_composio.py +11 -0
- lfx/components/composio/reddit_composio.py +11 -0
- lfx/components/composio/serpapi_composio.py +11 -0
- lfx/components/composio/slack_composio.py +11 -0
- lfx/components/composio/slackbot_composio.py +11 -0
- lfx/components/composio/snowflake_composio.py +11 -0
- lfx/components/composio/supabase_composio.py +11 -0
- lfx/components/composio/tavily_composio.py +11 -0
- lfx/components/composio/timelinesai_composio.py +11 -0
- lfx/components/composio/todoist_composio.py +11 -0
- lfx/components/composio/wrike_composio.py +11 -0
- lfx/components/composio/youtube_composio.py +11 -0
- lfx/components/confluence/__init__.py +3 -0
- lfx/components/confluence/confluence.py +84 -0
- lfx/components/couchbase/__init__.py +34 -0
- lfx/components/couchbase/couchbase.py +102 -0
- lfx/components/crewai/__init__.py +49 -0
- lfx/components/crewai/crewai.py +108 -0
- lfx/components/crewai/hierarchical_crew.py +47 -0
- lfx/components/crewai/hierarchical_task.py +45 -0
- lfx/components/crewai/sequential_crew.py +53 -0
- lfx/components/crewai/sequential_task.py +74 -0
- lfx/components/crewai/sequential_task_agent.py +144 -0
- lfx/components/cuga/__init__.py +34 -0
- lfx/components/cuga/cuga_agent.py +730 -0
- lfx/components/custom_component/__init__.py +34 -0
- lfx/components/custom_component/custom_component.py +31 -0
- lfx/components/data/__init__.py +114 -0
- lfx/components/data_source/__init__.py +58 -0
- lfx/components/data_source/api_request.py +577 -0
- lfx/components/data_source/csv_to_data.py +101 -0
- lfx/components/data_source/json_to_data.py +106 -0
- lfx/components/data_source/mock_data.py +398 -0
- lfx/components/data_source/news_search.py +166 -0
- lfx/components/data_source/rss.py +71 -0
- lfx/components/data_source/sql_executor.py +101 -0
- lfx/components/data_source/url.py +311 -0
- lfx/components/data_source/web_search.py +326 -0
- lfx/components/datastax/__init__.py +76 -0
- lfx/components/datastax/astradb_assistant_manager.py +307 -0
- lfx/components/datastax/astradb_chatmemory.py +40 -0
- lfx/components/datastax/astradb_cql.py +288 -0
- lfx/components/datastax/astradb_graph.py +217 -0
- lfx/components/datastax/astradb_tool.py +378 -0
- lfx/components/datastax/astradb_vectorize.py +122 -0
- lfx/components/datastax/astradb_vectorstore.py +449 -0
- lfx/components/datastax/create_assistant.py +59 -0
- lfx/components/datastax/create_thread.py +33 -0
- lfx/components/datastax/dotenv.py +36 -0
- lfx/components/datastax/get_assistant.py +38 -0
- lfx/components/datastax/getenvvar.py +31 -0
- lfx/components/datastax/graph_rag.py +141 -0
- lfx/components/datastax/hcd.py +315 -0
- lfx/components/datastax/list_assistants.py +26 -0
- lfx/components/datastax/run.py +90 -0
- lfx/components/deactivated/__init__.py +15 -0
- lfx/components/deactivated/amazon_kendra.py +66 -0
- lfx/components/deactivated/chat_litellm_model.py +158 -0
- lfx/components/deactivated/code_block_extractor.py +26 -0
- lfx/components/deactivated/documents_to_data.py +22 -0
- lfx/components/deactivated/embed.py +16 -0
- lfx/components/deactivated/extract_key_from_data.py +46 -0
- lfx/components/deactivated/json_document_builder.py +57 -0
- lfx/components/deactivated/list_flows.py +20 -0
- lfx/components/deactivated/mcp_sse.py +61 -0
- lfx/components/deactivated/mcp_stdio.py +62 -0
- lfx/components/deactivated/merge_data.py +93 -0
- lfx/components/deactivated/message.py +37 -0
- lfx/components/deactivated/metal.py +54 -0
- lfx/components/deactivated/multi_query.py +59 -0
- lfx/components/deactivated/retriever.py +43 -0
- lfx/components/deactivated/selective_passthrough.py +77 -0
- lfx/components/deactivated/should_run_next.py +40 -0
- lfx/components/deactivated/split_text.py +63 -0
- lfx/components/deactivated/store_message.py +24 -0
- lfx/components/deactivated/sub_flow.py +124 -0
- lfx/components/deactivated/vectara_self_query.py +76 -0
- lfx/components/deactivated/vector_store.py +24 -0
- lfx/components/deepseek/__init__.py +34 -0
- lfx/components/deepseek/deepseek.py +136 -0
- lfx/components/docling/__init__.py +43 -0
- lfx/components/docling/chunk_docling_document.py +186 -0
- lfx/components/docling/docling_inline.py +238 -0
- lfx/components/docling/docling_remote.py +195 -0
- lfx/components/docling/export_docling_document.py +117 -0
- lfx/components/documentloaders/__init__.py +3 -0
- lfx/components/duckduckgo/__init__.py +3 -0
- lfx/components/duckduckgo/duck_duck_go_search_run.py +92 -0
- lfx/components/elastic/__init__.py +37 -0
- lfx/components/elastic/elasticsearch.py +267 -0
- lfx/components/elastic/opensearch.py +789 -0
- lfx/components/elastic/opensearch_multimodal.py +1575 -0
- lfx/components/embeddings/__init__.py +37 -0
- lfx/components/embeddings/similarity.py +77 -0
- lfx/components/embeddings/text_embedder.py +65 -0
- lfx/components/exa/__init__.py +3 -0
- lfx/components/exa/exa_search.py +68 -0
- lfx/components/files_and_knowledge/__init__.py +47 -0
- lfx/components/files_and_knowledge/directory.py +113 -0
- lfx/components/files_and_knowledge/file.py +841 -0
- lfx/components/files_and_knowledge/ingestion.py +694 -0
- lfx/components/files_and_knowledge/retrieval.py +264 -0
- lfx/components/files_and_knowledge/save_file.py +746 -0
- lfx/components/firecrawl/__init__.py +43 -0
- lfx/components/firecrawl/firecrawl_crawl_api.py +88 -0
- lfx/components/firecrawl/firecrawl_extract_api.py +136 -0
- lfx/components/firecrawl/firecrawl_map_api.py +89 -0
- lfx/components/firecrawl/firecrawl_scrape_api.py +73 -0
- lfx/components/flow_controls/__init__.py +58 -0
- lfx/components/flow_controls/conditional_router.py +208 -0
- lfx/components/flow_controls/data_conditional_router.py +126 -0
- lfx/components/flow_controls/flow_tool.py +111 -0
- lfx/components/flow_controls/listen.py +29 -0
- lfx/components/flow_controls/loop.py +163 -0
- lfx/components/flow_controls/notify.py +88 -0
- lfx/components/flow_controls/pass_message.py +36 -0
- lfx/components/flow_controls/run_flow.py +108 -0
- lfx/components/flow_controls/sub_flow.py +115 -0
- lfx/components/git/__init__.py +4 -0
- lfx/components/git/git.py +262 -0
- lfx/components/git/gitextractor.py +196 -0
- lfx/components/glean/__init__.py +3 -0
- lfx/components/glean/glean_search_api.py +173 -0
- lfx/components/google/__init__.py +17 -0
- lfx/components/google/gmail.py +193 -0
- lfx/components/google/google_bq_sql_executor.py +157 -0
- lfx/components/google/google_drive.py +92 -0
- lfx/components/google/google_drive_search.py +152 -0
- lfx/components/google/google_generative_ai.py +144 -0
- lfx/components/google/google_generative_ai_embeddings.py +141 -0
- lfx/components/google/google_oauth_token.py +89 -0
- lfx/components/google/google_search_api_core.py +68 -0
- lfx/components/google/google_serper_api_core.py +74 -0
- lfx/components/groq/__init__.py +34 -0
- lfx/components/groq/groq.py +143 -0
- lfx/components/helpers/__init__.py +154 -0
- lfx/components/homeassistant/__init__.py +7 -0
- lfx/components/homeassistant/home_assistant_control.py +152 -0
- lfx/components/homeassistant/list_home_assistant_states.py +137 -0
- lfx/components/huggingface/__init__.py +37 -0
- lfx/components/huggingface/huggingface.py +199 -0
- lfx/components/huggingface/huggingface_inference_api.py +106 -0
- lfx/components/ibm/__init__.py +34 -0
- lfx/components/ibm/watsonx.py +207 -0
- lfx/components/ibm/watsonx_embeddings.py +135 -0
- lfx/components/icosacomputing/__init__.py +5 -0
- lfx/components/icosacomputing/combinatorial_reasoner.py +84 -0
- lfx/components/input_output/__init__.py +40 -0
- lfx/components/input_output/chat.py +109 -0
- lfx/components/input_output/chat_output.py +184 -0
- lfx/components/input_output/text.py +27 -0
- lfx/components/input_output/text_output.py +29 -0
- lfx/components/input_output/webhook.py +56 -0
- lfx/components/jigsawstack/__init__.py +23 -0
- lfx/components/jigsawstack/ai_scrape.py +126 -0
- lfx/components/jigsawstack/ai_web_search.py +136 -0
- lfx/components/jigsawstack/file_read.py +115 -0
- lfx/components/jigsawstack/file_upload.py +94 -0
- lfx/components/jigsawstack/image_generation.py +205 -0
- lfx/components/jigsawstack/nsfw.py +60 -0
- lfx/components/jigsawstack/object_detection.py +124 -0
- lfx/components/jigsawstack/sentiment.py +112 -0
- lfx/components/jigsawstack/text_to_sql.py +90 -0
- lfx/components/jigsawstack/text_translate.py +77 -0
- lfx/components/jigsawstack/vocr.py +107 -0
- lfx/components/knowledge_bases/__init__.py +89 -0
- lfx/components/langchain_utilities/__init__.py +109 -0
- lfx/components/langchain_utilities/character.py +53 -0
- lfx/components/langchain_utilities/conversation.py +59 -0
- lfx/components/langchain_utilities/csv_agent.py +175 -0
- lfx/components/langchain_utilities/fake_embeddings.py +26 -0
- lfx/components/langchain_utilities/html_link_extractor.py +35 -0
- lfx/components/langchain_utilities/json_agent.py +100 -0
- lfx/components/langchain_utilities/langchain_hub.py +126 -0
- lfx/components/langchain_utilities/language_recursive.py +49 -0
- lfx/components/langchain_utilities/language_semantic.py +138 -0
- lfx/components/langchain_utilities/llm_checker.py +39 -0
- lfx/components/langchain_utilities/llm_math.py +42 -0
- lfx/components/langchain_utilities/natural_language.py +61 -0
- lfx/components/langchain_utilities/openai_tools.py +53 -0
- lfx/components/langchain_utilities/openapi.py +48 -0
- lfx/components/langchain_utilities/recursive_character.py +60 -0
- lfx/components/langchain_utilities/retrieval_qa.py +83 -0
- lfx/components/langchain_utilities/runnable_executor.py +137 -0
- lfx/components/langchain_utilities/self_query.py +80 -0
- lfx/components/langchain_utilities/spider.py +142 -0
- lfx/components/langchain_utilities/sql.py +40 -0
- lfx/components/langchain_utilities/sql_database.py +35 -0
- lfx/components/langchain_utilities/sql_generator.py +78 -0
- lfx/components/langchain_utilities/tool_calling.py +59 -0
- lfx/components/langchain_utilities/vector_store_info.py +49 -0
- lfx/components/langchain_utilities/vector_store_router.py +33 -0
- lfx/components/langchain_utilities/xml_agent.py +71 -0
- lfx/components/langwatch/__init__.py +3 -0
- lfx/components/langwatch/langwatch.py +278 -0
- lfx/components/link_extractors/__init__.py +3 -0
- lfx/components/llm_operations/__init__.py +46 -0
- lfx/components/llm_operations/batch_run.py +205 -0
- lfx/components/llm_operations/lambda_filter.py +218 -0
- lfx/components/llm_operations/llm_conditional_router.py +421 -0
- lfx/components/llm_operations/llm_selector.py +499 -0
- lfx/components/llm_operations/structured_output.py +244 -0
- lfx/components/lmstudio/__init__.py +34 -0
- lfx/components/lmstudio/lmstudioembeddings.py +89 -0
- lfx/components/lmstudio/lmstudiomodel.py +133 -0
- lfx/components/logic/__init__.py +181 -0
- lfx/components/maritalk/__init__.py +32 -0
- lfx/components/maritalk/maritalk.py +52 -0
- lfx/components/mem0/__init__.py +3 -0
- lfx/components/mem0/mem0_chat_memory.py +147 -0
- lfx/components/milvus/__init__.py +34 -0
- lfx/components/milvus/milvus.py +115 -0
- lfx/components/mistral/__init__.py +37 -0
- lfx/components/mistral/mistral.py +114 -0
- lfx/components/mistral/mistral_embeddings.py +58 -0
- lfx/components/models/__init__.py +89 -0
- lfx/components/models_and_agents/__init__.py +49 -0
- lfx/components/models_and_agents/agent.py +644 -0
- lfx/components/models_and_agents/embedding_model.py +423 -0
- lfx/components/models_and_agents/language_model.py +398 -0
- lfx/components/models_and_agents/mcp_component.py +594 -0
- lfx/components/models_and_agents/memory.py +268 -0
- lfx/components/models_and_agents/prompt.py +67 -0
- lfx/components/mongodb/__init__.py +34 -0
- lfx/components/mongodb/mongodb_atlas.py +213 -0
- lfx/components/needle/__init__.py +3 -0
- lfx/components/needle/needle.py +104 -0
- lfx/components/notdiamond/__init__.py +34 -0
- lfx/components/notdiamond/notdiamond.py +228 -0
- lfx/components/novita/__init__.py +32 -0
- lfx/components/novita/novita.py +130 -0
- lfx/components/nvidia/__init__.py +57 -0
- lfx/components/nvidia/nvidia.py +151 -0
- lfx/components/nvidia/nvidia_embedding.py +77 -0
- lfx/components/nvidia/nvidia_ingest.py +317 -0
- lfx/components/nvidia/nvidia_rerank.py +63 -0
- lfx/components/nvidia/system_assist.py +65 -0
- lfx/components/olivya/__init__.py +3 -0
- lfx/components/olivya/olivya.py +116 -0
- lfx/components/ollama/__init__.py +37 -0
- lfx/components/ollama/ollama.py +548 -0
- lfx/components/ollama/ollama_embeddings.py +103 -0
- lfx/components/openai/__init__.py +37 -0
- lfx/components/openai/openai.py +100 -0
- lfx/components/openai/openai_chat_model.py +176 -0
- lfx/components/openrouter/__init__.py +32 -0
- lfx/components/openrouter/openrouter.py +104 -0
- lfx/components/output_parsers/__init__.py +3 -0
- lfx/components/perplexity/__init__.py +34 -0
- lfx/components/perplexity/perplexity.py +75 -0
- lfx/components/pgvector/__init__.py +34 -0
- lfx/components/pgvector/pgvector.py +72 -0
- lfx/components/pinecone/__init__.py +34 -0
- lfx/components/pinecone/pinecone.py +134 -0
- lfx/components/processing/__init__.py +72 -0
- lfx/components/processing/alter_metadata.py +109 -0
- lfx/components/processing/combine_text.py +40 -0
- lfx/components/processing/converter.py +248 -0
- lfx/components/processing/create_data.py +111 -0
- lfx/components/processing/create_list.py +40 -0
- lfx/components/processing/data_operations.py +528 -0
- lfx/components/processing/data_to_dataframe.py +71 -0
- lfx/components/processing/dataframe_operations.py +313 -0
- lfx/components/processing/dataframe_to_toolset.py +259 -0
- lfx/components/processing/dynamic_create_data.py +357 -0
- lfx/components/processing/extract_key.py +54 -0
- lfx/components/processing/filter_data.py +43 -0
- lfx/components/processing/filter_data_values.py +89 -0
- lfx/components/processing/json_cleaner.py +104 -0
- lfx/components/processing/merge_data.py +91 -0
- lfx/components/processing/message_to_data.py +37 -0
- lfx/components/processing/output_parser.py +46 -0
- lfx/components/processing/parse_data.py +71 -0
- lfx/components/processing/parse_dataframe.py +69 -0
- lfx/components/processing/parse_json_data.py +91 -0
- lfx/components/processing/parser.py +148 -0
- lfx/components/processing/regex.py +83 -0
- lfx/components/processing/select_data.py +49 -0
- lfx/components/processing/split_text.py +141 -0
- lfx/components/processing/store_message.py +91 -0
- lfx/components/processing/update_data.py +161 -0
- lfx/components/prototypes/__init__.py +35 -0
- lfx/components/prototypes/python_function.py +73 -0
- lfx/components/qdrant/__init__.py +34 -0
- lfx/components/qdrant/qdrant.py +109 -0
- lfx/components/redis/__init__.py +37 -0
- lfx/components/redis/redis.py +89 -0
- lfx/components/redis/redis_chat.py +43 -0
- lfx/components/sambanova/__init__.py +32 -0
- lfx/components/sambanova/sambanova.py +84 -0
- lfx/components/scrapegraph/__init__.py +40 -0
- lfx/components/scrapegraph/scrapegraph_markdownify_api.py +64 -0
- lfx/components/scrapegraph/scrapegraph_search_api.py +64 -0
- lfx/components/scrapegraph/scrapegraph_smart_scraper_api.py +71 -0
- lfx/components/searchapi/__init__.py +34 -0
- lfx/components/searchapi/search.py +79 -0
- lfx/components/serpapi/__init__.py +3 -0
- lfx/components/serpapi/serp.py +115 -0
- lfx/components/supabase/__init__.py +34 -0
- lfx/components/supabase/supabase.py +76 -0
- lfx/components/tavily/__init__.py +4 -0
- lfx/components/tavily/tavily_extract.py +117 -0
- lfx/components/tavily/tavily_search.py +212 -0
- lfx/components/textsplitters/__init__.py +3 -0
- lfx/components/toolkits/__init__.py +3 -0
- lfx/components/tools/__init__.py +66 -0
- lfx/components/tools/calculator.py +109 -0
- lfx/components/tools/google_search_api.py +45 -0
- lfx/components/tools/google_serper_api.py +115 -0
- lfx/components/tools/python_code_structured_tool.py +328 -0
- lfx/components/tools/python_repl.py +98 -0
- lfx/components/tools/search_api.py +88 -0
- lfx/components/tools/searxng.py +145 -0
- lfx/components/tools/serp_api.py +120 -0
- lfx/components/tools/tavily_search_tool.py +345 -0
- lfx/components/tools/wikidata_api.py +103 -0
- lfx/components/tools/wikipedia_api.py +50 -0
- lfx/components/tools/yahoo_finance.py +130 -0
- lfx/components/twelvelabs/__init__.py +52 -0
- lfx/components/twelvelabs/convert_astra_results.py +84 -0
- lfx/components/twelvelabs/pegasus_index.py +311 -0
- lfx/components/twelvelabs/split_video.py +301 -0
- lfx/components/twelvelabs/text_embeddings.py +57 -0
- lfx/components/twelvelabs/twelvelabs_pegasus.py +408 -0
- lfx/components/twelvelabs/video_embeddings.py +100 -0
- lfx/components/twelvelabs/video_file.py +191 -0
- lfx/components/unstructured/__init__.py +3 -0
- lfx/components/unstructured/unstructured.py +121 -0
- lfx/components/upstash/__init__.py +34 -0
- lfx/components/upstash/upstash.py +124 -0
- lfx/components/utilities/__init__.py +43 -0
- lfx/components/utilities/calculator_core.py +89 -0
- lfx/components/utilities/current_date.py +42 -0
- lfx/components/utilities/id_generator.py +42 -0
- lfx/components/utilities/python_repl_core.py +98 -0
- lfx/components/vectara/__init__.py +37 -0
- lfx/components/vectara/vectara.py +97 -0
- lfx/components/vectara/vectara_rag.py +164 -0
- lfx/components/vectorstores/__init__.py +34 -0
- lfx/components/vectorstores/local_db.py +270 -0
- lfx/components/vertexai/__init__.py +37 -0
- lfx/components/vertexai/vertexai.py +71 -0
- lfx/components/vertexai/vertexai_embeddings.py +67 -0
- lfx/components/vlmrun/__init__.py +34 -0
- lfx/components/vlmrun/vlmrun_transcription.py +224 -0
- lfx/components/weaviate/__init__.py +34 -0
- lfx/components/weaviate/weaviate.py +89 -0
- lfx/components/wikipedia/__init__.py +4 -0
- lfx/components/wikipedia/wikidata.py +86 -0
- lfx/components/wikipedia/wikipedia.py +53 -0
- lfx/components/wolframalpha/__init__.py +3 -0
- lfx/components/wolframalpha/wolfram_alpha_api.py +54 -0
- lfx/components/xai/__init__.py +32 -0
- lfx/components/xai/xai.py +167 -0
- lfx/components/yahoosearch/__init__.py +3 -0
- lfx/components/yahoosearch/yahoo.py +137 -0
- lfx/components/youtube/__init__.py +52 -0
- lfx/components/youtube/channel.py +227 -0
- lfx/components/youtube/comments.py +231 -0
- lfx/components/youtube/playlist.py +33 -0
- lfx/components/youtube/search.py +120 -0
- lfx/components/youtube/trending.py +285 -0
- lfx/components/youtube/video_details.py +263 -0
- lfx/components/youtube/youtube_transcripts.py +206 -0
- lfx/components/zep/__init__.py +3 -0
- lfx/components/zep/zep.py +45 -0
- lfx/constants.py +6 -0
- lfx/custom/__init__.py +7 -0
- lfx/custom/attributes.py +87 -0
- lfx/custom/code_parser/__init__.py +3 -0
- lfx/custom/code_parser/code_parser.py +361 -0
- lfx/custom/custom_component/__init__.py +0 -0
- lfx/custom/custom_component/base_component.py +128 -0
- lfx/custom/custom_component/component.py +1890 -0
- lfx/custom/custom_component/component_with_cache.py +8 -0
- lfx/custom/custom_component/custom_component.py +650 -0
- lfx/custom/dependency_analyzer.py +165 -0
- lfx/custom/directory_reader/__init__.py +3 -0
- lfx/custom/directory_reader/directory_reader.py +359 -0
- lfx/custom/directory_reader/utils.py +171 -0
- lfx/custom/eval.py +12 -0
- lfx/custom/schema.py +32 -0
- lfx/custom/tree_visitor.py +21 -0
- lfx/custom/utils.py +877 -0
- lfx/custom/validate.py +523 -0
- lfx/events/__init__.py +1 -0
- lfx/events/event_manager.py +110 -0
- lfx/exceptions/__init__.py +0 -0
- lfx/exceptions/component.py +15 -0
- lfx/field_typing/__init__.py +91 -0
- lfx/field_typing/constants.py +216 -0
- lfx/field_typing/range_spec.py +35 -0
- lfx/graph/__init__.py +6 -0
- lfx/graph/edge/__init__.py +0 -0
- lfx/graph/edge/base.py +300 -0
- lfx/graph/edge/schema.py +119 -0
- lfx/graph/edge/utils.py +0 -0
- lfx/graph/graph/__init__.py +0 -0
- lfx/graph/graph/ascii.py +202 -0
- lfx/graph/graph/base.py +2298 -0
- lfx/graph/graph/constants.py +63 -0
- lfx/graph/graph/runnable_vertices_manager.py +133 -0
- lfx/graph/graph/schema.py +53 -0
- lfx/graph/graph/state_model.py +66 -0
- lfx/graph/graph/utils.py +1024 -0
- lfx/graph/schema.py +75 -0
- lfx/graph/state/__init__.py +0 -0
- lfx/graph/state/model.py +250 -0
- lfx/graph/utils.py +206 -0
- lfx/graph/vertex/__init__.py +0 -0
- lfx/graph/vertex/base.py +826 -0
- lfx/graph/vertex/constants.py +0 -0
- lfx/graph/vertex/exceptions.py +4 -0
- lfx/graph/vertex/param_handler.py +316 -0
- lfx/graph/vertex/schema.py +26 -0
- lfx/graph/vertex/utils.py +19 -0
- lfx/graph/vertex/vertex_types.py +489 -0
- lfx/helpers/__init__.py +141 -0
- lfx/helpers/base_model.py +71 -0
- lfx/helpers/custom.py +13 -0
- lfx/helpers/data.py +167 -0
- lfx/helpers/flow.py +308 -0
- lfx/inputs/__init__.py +68 -0
- lfx/inputs/constants.py +2 -0
- lfx/inputs/input_mixin.py +352 -0
- lfx/inputs/inputs.py +718 -0
- lfx/inputs/validators.py +19 -0
- lfx/interface/__init__.py +6 -0
- lfx/interface/components.py +897 -0
- lfx/interface/importing/__init__.py +5 -0
- lfx/interface/importing/utils.py +39 -0
- lfx/interface/initialize/__init__.py +3 -0
- lfx/interface/initialize/loading.py +317 -0
- lfx/interface/listing.py +26 -0
- lfx/interface/run.py +16 -0
- lfx/interface/utils.py +111 -0
- lfx/io/__init__.py +63 -0
- lfx/io/schema.py +295 -0
- lfx/load/__init__.py +8 -0
- lfx/load/load.py +256 -0
- lfx/load/utils.py +99 -0
- lfx/log/__init__.py +5 -0
- lfx/log/logger.py +411 -0
- lfx/logging/__init__.py +11 -0
- lfx/logging/logger.py +24 -0
- lfx/memory/__init__.py +70 -0
- lfx/memory/stubs.py +302 -0
- lfx/processing/__init__.py +1 -0
- lfx/processing/process.py +238 -0
- lfx/processing/utils.py +25 -0
- lfx/py.typed +0 -0
- lfx/schema/__init__.py +66 -0
- lfx/schema/artifact.py +83 -0
- lfx/schema/content_block.py +62 -0
- lfx/schema/content_types.py +91 -0
- lfx/schema/cross_module.py +80 -0
- lfx/schema/data.py +309 -0
- lfx/schema/dataframe.py +210 -0
- lfx/schema/dotdict.py +74 -0
- lfx/schema/encoders.py +13 -0
- lfx/schema/graph.py +47 -0
- lfx/schema/image.py +184 -0
- lfx/schema/json_schema.py +186 -0
- lfx/schema/log.py +62 -0
- lfx/schema/message.py +493 -0
- lfx/schema/openai_responses_schemas.py +74 -0
- lfx/schema/properties.py +41 -0
- lfx/schema/schema.py +180 -0
- lfx/schema/serialize.py +13 -0
- lfx/schema/table.py +142 -0
- lfx/schema/validators.py +114 -0
- lfx/serialization/__init__.py +5 -0
- lfx/serialization/constants.py +2 -0
- lfx/serialization/serialization.py +314 -0
- lfx/services/__init__.py +26 -0
- lfx/services/base.py +28 -0
- lfx/services/cache/__init__.py +6 -0
- lfx/services/cache/base.py +183 -0
- lfx/services/cache/service.py +166 -0
- lfx/services/cache/utils.py +169 -0
- lfx/services/chat/__init__.py +1 -0
- lfx/services/chat/config.py +2 -0
- lfx/services/chat/schema.py +10 -0
- lfx/services/database/__init__.py +5 -0
- lfx/services/database/service.py +25 -0
- lfx/services/deps.py +194 -0
- lfx/services/factory.py +19 -0
- lfx/services/initialize.py +19 -0
- lfx/services/interfaces.py +103 -0
- lfx/services/manager.py +185 -0
- lfx/services/mcp_composer/__init__.py +6 -0
- lfx/services/mcp_composer/factory.py +16 -0
- lfx/services/mcp_composer/service.py +1441 -0
- lfx/services/schema.py +21 -0
- lfx/services/session.py +87 -0
- lfx/services/settings/__init__.py +3 -0
- lfx/services/settings/auth.py +133 -0
- lfx/services/settings/base.py +668 -0
- lfx/services/settings/constants.py +43 -0
- lfx/services/settings/factory.py +23 -0
- lfx/services/settings/feature_flags.py +11 -0
- lfx/services/settings/service.py +35 -0
- lfx/services/settings/utils.py +40 -0
- lfx/services/shared_component_cache/__init__.py +1 -0
- lfx/services/shared_component_cache/factory.py +30 -0
- lfx/services/shared_component_cache/service.py +9 -0
- lfx/services/storage/__init__.py +5 -0
- lfx/services/storage/local.py +185 -0
- lfx/services/storage/service.py +177 -0
- lfx/services/tracing/__init__.py +1 -0
- lfx/services/tracing/service.py +21 -0
- lfx/settings.py +6 -0
- lfx/template/__init__.py +6 -0
- lfx/template/field/__init__.py +0 -0
- lfx/template/field/base.py +260 -0
- lfx/template/field/prompt.py +15 -0
- lfx/template/frontend_node/__init__.py +6 -0
- lfx/template/frontend_node/base.py +214 -0
- lfx/template/frontend_node/constants.py +65 -0
- lfx/template/frontend_node/custom_components.py +79 -0
- lfx/template/template/__init__.py +0 -0
- lfx/template/template/base.py +100 -0
- lfx/template/utils.py +217 -0
- lfx/type_extraction/__init__.py +19 -0
- lfx/type_extraction/type_extraction.py +75 -0
- lfx/type_extraction.py +80 -0
- lfx/utils/__init__.py +1 -0
- lfx/utils/async_helpers.py +42 -0
- lfx/utils/component_utils.py +154 -0
- lfx/utils/concurrency.py +60 -0
- lfx/utils/connection_string_parser.py +11 -0
- lfx/utils/constants.py +233 -0
- lfx/utils/data_structure.py +212 -0
- lfx/utils/exceptions.py +22 -0
- lfx/utils/helpers.py +34 -0
- lfx/utils/image.py +79 -0
- lfx/utils/langflow_utils.py +52 -0
- lfx/utils/lazy_load.py +15 -0
- lfx/utils/request_utils.py +18 -0
- lfx/utils/schemas.py +139 -0
- lfx/utils/ssrf_protection.py +384 -0
- lfx/utils/util.py +626 -0
- lfx/utils/util_strings.py +56 -0
- lfx/utils/validate_cloud.py +26 -0
- lfx/utils/version.py +24 -0
- lfx_nightly-0.2.0.dev25.dist-info/METADATA +312 -0
- lfx_nightly-0.2.0.dev25.dist-info/RECORD +769 -0
- lfx_nightly-0.2.0.dev25.dist-info/WHEEL +4 -0
- lfx_nightly-0.2.0.dev25.dist-info/entry_points.txt +2 -0
|
@@ -0,0 +1,1441 @@
|
|
|
1
|
+
"""MCP Composer service for proxying and orchestrating MCP servers."""
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import json
|
|
5
|
+
import os
|
|
6
|
+
import platform
|
|
7
|
+
import re
|
|
8
|
+
import select
|
|
9
|
+
import socket
|
|
10
|
+
import subprocess
|
|
11
|
+
import tempfile
|
|
12
|
+
import typing
|
|
13
|
+
from collections.abc import Callable
|
|
14
|
+
from functools import wraps
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
from typing import Any
|
|
17
|
+
|
|
18
|
+
from lfx.log.logger import logger
|
|
19
|
+
from lfx.services.base import Service
|
|
20
|
+
from lfx.services.deps import get_settings_service
|
|
21
|
+
|
|
22
|
+
# Fallback user-facing message used when a composer process fails to start
# and no more specific cause could be extracted from its output/logs.
GENERIC_STARTUP_ERROR_MSG = (
    "MCP Composer startup failed. Check OAuth configuration and check logs for more information."
)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class MCPComposerError(Exception):
|
|
28
|
+
"""Base exception for MCP Composer errors."""
|
|
29
|
+
|
|
30
|
+
def __init__(self, message: str | None, project_id: str | None = None):
|
|
31
|
+
if not message:
|
|
32
|
+
message = GENERIC_STARTUP_ERROR_MSG
|
|
33
|
+
self.message = message
|
|
34
|
+
self.project_id = project_id
|
|
35
|
+
super().__init__(message)
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class MCPComposerPortError(MCPComposerError):
    """Raised when the requested port is already in use or cannot be bound."""
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class MCPComposerConfigError(MCPComposerError):
    """Raised when an invalid composer configuration is provided (e.g. bad port)."""
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
class MCPComposerDisabledError(MCPComposerError):
    """Raised when a composer operation is requested but MCP Composer is disabled in settings."""
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class MCPComposerStartupError(MCPComposerError):
    """Raised when the MCP Composer subprocess fails to start."""
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def require_composer_enabled(func: Callable) -> Callable:
    """Decorator that checks if MCP Composer is enabled before executing the method.

    Works for both sync and async methods: for an async ``func`` the wrapper
    returns the not-yet-awaited coroutine, so the enabled check still runs
    eagerly at call time.

    Raises:
        MCPComposerDisabledError: If ``mcp_composer_enabled`` is off in settings.
    """

    @wraps(func)
    def wrapper(self, *args, **kwargs):
        if not get_settings_service().settings.mcp_composer_enabled:
            # Decorated service methods take project_id as their first
            # positional argument; previously only kwargs were inspected, so
            # positional calls lost the project context in the raised error.
            project_id = kwargs.get("project_id")
            if project_id is None and args:
                project_id = args[0]
            error_msg = "MCP Composer is disabled in settings"
            raise MCPComposerDisabledError(error_msg, project_id)

        return func(self, *args, **kwargs)

    return wrapper
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
class MCPComposerService(Service):
|
|
70
|
+
"""Service for managing per-project MCP Composer instances."""
|
|
71
|
+
|
|
72
|
+
name = "mcp_composer_service"
|
|
73
|
+
|
|
74
|
+
def __init__(self):
|
|
75
|
+
super().__init__()
|
|
76
|
+
self.project_composers: dict[str, dict] = {} # project_id -> {process, host, port, sse_url, auth_config}
|
|
77
|
+
self._start_locks: dict[
|
|
78
|
+
str, asyncio.Lock
|
|
79
|
+
] = {} # Lock to prevent concurrent start operations for the same project
|
|
80
|
+
self._active_start_tasks: dict[
|
|
81
|
+
str, asyncio.Task
|
|
82
|
+
] = {} # Track active start tasks to cancel them when new request arrives
|
|
83
|
+
self._port_to_project: dict[int, str] = {} # Track which project is using which port
|
|
84
|
+
self._pid_to_project: dict[int, str] = {} # Track which PID belongs to which project
|
|
85
|
+
self._last_errors: dict[str, str] = {} # Track last error message per project for UI display
|
|
86
|
+
|
|
87
|
+
def get_last_error(self, project_id: str) -> str | None:
|
|
88
|
+
"""Get the last error message for a project, if any."""
|
|
89
|
+
return self._last_errors.get(project_id)
|
|
90
|
+
|
|
91
|
+
def set_last_error(self, project_id: str, error_message: str) -> None:
|
|
92
|
+
"""Set the last error message for a project."""
|
|
93
|
+
self._last_errors[project_id] = error_message
|
|
94
|
+
|
|
95
|
+
def clear_last_error(self, project_id: str) -> None:
|
|
96
|
+
"""Clear the last error message for a project."""
|
|
97
|
+
self._last_errors.pop(project_id, None)
|
|
98
|
+
|
|
99
|
+
def _is_port_available(self, port: int, host: str = "localhost") -> bool:
|
|
100
|
+
"""Check if a port is available by trying to bind to it.
|
|
101
|
+
|
|
102
|
+
Args:
|
|
103
|
+
port: Port number to check
|
|
104
|
+
host: Host to check (default: localhost)
|
|
105
|
+
|
|
106
|
+
Returns:
|
|
107
|
+
True if port is available (not in use), False if in use
|
|
108
|
+
|
|
109
|
+
Raises:
|
|
110
|
+
ValueError: If port is not in valid range (0-65535)
|
|
111
|
+
"""
|
|
112
|
+
import errno
|
|
113
|
+
|
|
114
|
+
# Validate port range before attempting bind
|
|
115
|
+
max_port = 65535
|
|
116
|
+
if not isinstance(port, int) or port < 0 or port > max_port:
|
|
117
|
+
msg = f"Invalid port number: {port}. Port must be between 0 and {max_port}."
|
|
118
|
+
raise ValueError(msg)
|
|
119
|
+
|
|
120
|
+
# Check both IPv4 and IPv6 to ensure port is truly available
|
|
121
|
+
# MCP Composer tries to bind on both, so we need to check both
|
|
122
|
+
|
|
123
|
+
# Check IPv4
|
|
124
|
+
try:
|
|
125
|
+
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
|
|
126
|
+
# Don't use SO_REUSEADDR here as it can give false positives
|
|
127
|
+
sock.bind((host, port))
|
|
128
|
+
except OSError:
|
|
129
|
+
return False # Port is in use on IPv4
|
|
130
|
+
|
|
131
|
+
# Check IPv6 (if supported on this system)
|
|
132
|
+
try:
|
|
133
|
+
with socket.socket(socket.AF_INET6, socket.SOCK_STREAM) as sock:
|
|
134
|
+
# Don't use SO_REUSEADDR here as it can give false positives
|
|
135
|
+
# Use ::1 for localhost on IPv6
|
|
136
|
+
ipv6_host = "::1" if host in ("localhost", "127.0.0.1") else host
|
|
137
|
+
sock.bind((ipv6_host, port))
|
|
138
|
+
except OSError as e:
|
|
139
|
+
# Check if it's "address already in use" error
|
|
140
|
+
# errno.EADDRINUSE is 48 on macOS, 98 on Linux, 10048 on Windows (WSAEADDRINUSE)
|
|
141
|
+
# We check both the standard errno and Windows-specific error code
|
|
142
|
+
if e.errno in (errno.EADDRINUSE, 10048):
|
|
143
|
+
return False # Port is in use on IPv6
|
|
144
|
+
# For other errors (e.g., IPv6 not supported, EADDRNOTAVAIL), continue
|
|
145
|
+
# IPv6 might not be supported on this system, which is okay
|
|
146
|
+
|
|
147
|
+
return True # Port is available on both IPv4 and IPv6 (or IPv6 not supported)
|
|
148
|
+
|
|
149
|
+
    async def _kill_process_on_port(self, port: int) -> bool:
        """Kill the process using the specified port.

        Cross-platform implementation supporting Windows, macOS, and Linux.
        Returns after the first successful kill rather than killing every
        listener found on the port.

        Args:
            port: The port number to check

        Returns:
            True if a process was found and killed, False otherwise
        """
        try:
            await logger.adebug(f"Checking for processes using port {port}...")
            os_type = platform.system()

            # Platform-specific command to find PID
            if os_type == "Windows":
                # Use netstat on Windows - use full path to avoid PATH issues
                netstat_cmd = os.path.join(os.environ.get("SYSTEMROOT", "C:\\Windows"), "System32", "netstat.exe")  # noqa: PTH118
                result = await asyncio.to_thread(
                    subprocess.run,
                    [netstat_cmd, "-ano"],
                    capture_output=True,
                    text=True,
                    check=False,
                )

                if result.returncode == 0:
                    # Parse netstat output to find PID
                    # Format: TCP 0.0.0.0:PORT 0.0.0.0:0 LISTENING PID
                    windows_pids: list[int] = []
                    for line in result.stdout.split("\n"):
                        if f":{port}" in line and "LISTENING" in line:
                            parts = line.split()
                            if parts:
                                try:
                                    # PID is the last whitespace-separated field.
                                    pid = int(parts[-1])
                                    windows_pids.append(pid)
                                except (ValueError, IndexError):
                                    continue

                    await logger.adebug(f"Found {len(windows_pids)} process(es) using port {port}: {windows_pids}")

                    for pid in windows_pids:
                        try:
                            await logger.adebug(f"Attempting to kill process {pid} on port {port}...")
                            # Use taskkill on Windows - use full path to avoid PATH issues
                            taskkill_cmd = os.path.join(  # noqa: PTH118
                                os.environ.get("SYSTEMROOT", "C:\\Windows"), "System32", "taskkill.exe"
                            )
                            kill_result = await asyncio.to_thread(
                                subprocess.run,
                                [taskkill_cmd, "/F", "/PID", str(pid)],
                                capture_output=True,
                                check=False,
                            )

                            if kill_result.returncode == 0:
                                await logger.adebug(f"Successfully killed process {pid} on port {port}")
                                return True
                            await logger.awarning(
                                f"taskkill returned {kill_result.returncode} for process {pid} on port {port}"
                            )
                        except Exception as e:  # noqa: BLE001
                            await logger.aerror(f"Error killing PID {pid}: {e}")

                    # Found listeners but could not kill any of them.
                    return False
            else:
                # Use lsof on Unix-like systems (macOS, Linux)
                result = await asyncio.to_thread(
                    subprocess.run,
                    ["lsof", "-ti", f":{port}"],
                    capture_output=True,
                    text=True,
                    check=False,
                )

                await logger.adebug(f"lsof returned code {result.returncode} for port {port}")

                # Extract PIDs from lsof output
                lsof_output = result.stdout.strip()
                lsof_errors = result.stderr.strip()

                if lsof_output:
                    await logger.adebug(f"lsof stdout: {lsof_output}")
                if lsof_errors:
                    await logger.adebug(f"lsof stderr: {lsof_errors}")

                if result.returncode == 0 and lsof_output:
                    # `lsof -t` prints one PID per line.
                    unix_pids = lsof_output.split("\n")
                    await logger.adebug(f"Found {len(unix_pids)} process(es) using port {port}: {unix_pids}")

                    for pid_str in unix_pids:
                        try:
                            pid = int(pid_str.strip())
                            await logger.adebug(f"Attempting to kill process {pid} on port {port}...")

                            # Try to kill the process
                            kill_result = await asyncio.to_thread(
                                subprocess.run,
                                ["kill", "-9", str(pid)],
                                capture_output=True,
                                check=False,
                            )

                            if kill_result.returncode == 0:
                                await logger.adebug(f"Successfully sent kill signal to process {pid} on port {port}")
                                return True
                            await logger.awarning(
                                f"kill command returned {kill_result.returncode} for process {pid} on port {port}"
                            )
                        except (ValueError, ProcessLookupError) as e:
                            await logger.aerror(f"Error processing PID {pid_str}: {e}")

                    # If we get here, we found processes but couldn't kill any
                    return False
            # Reached when the PID lookup command failed or produced no PIDs.
            await logger.adebug(f"No process found using port {port}")
            return False
        except Exception as e:  # noqa: BLE001
            await logger.aerror(f"Error finding/killing process on port {port}: {e}")
            return False
        # Defensive fallback; all paths above already return.
        return False
|
|
271
|
+
|
|
272
|
+
    async def _kill_zombie_mcp_processes(self, port: int) -> bool:
        """Kill zombie MCP Composer processes that may be stuck.

        On Windows, sometimes MCP Composer processes start but fail to bind to port.
        These processes become "zombies" that need to be killed before retry.
        Windows-only; on other platforms this returns False immediately.
        Processes tracked in ``self._pid_to_project`` are never killed.

        Args:
            port: The port that should be used

        Returns:
            True if zombie processes were found and killed
        """
        try:
            os_type = platform.system()
            if os_type != "Windows":
                return False

            await logger.adebug(f"Looking for zombie MCP Composer processes on Windows for port {port}...")

            # First, try to find and kill any process using the port directly
            # Use full path to netstat on Windows to avoid PATH issues
            netstat_cmd = os.path.join(os.environ.get("SYSTEMROOT", "C:\\Windows"), "System32", "netstat.exe")  # noqa: PTH118
            netstat_result = await asyncio.to_thread(
                subprocess.run,
                [netstat_cmd, "-ano"],
                capture_output=True,
                text=True,
                check=False,
            )

            killed_any = False
            if netstat_result.returncode == 0:
                # Parse netstat output to find PIDs using our port
                pids_on_port: list[int] = []
                for line in netstat_result.stdout.split("\n"):
                    if f":{port}" in line and "LISTENING" in line:
                        parts = line.split()
                        if parts:
                            try:
                                pid = int(parts[-1])
                                # Only kill if not tracked by us
                                if pid not in self._pid_to_project:
                                    pids_on_port.append(pid)
                                else:
                                    project = self._pid_to_project[pid]
                                    await logger.adebug(
                                        f"Process {pid} on port {port} is tracked, skipping (project: {project})"
                                    )
                            except (ValueError, IndexError):
                                continue

                if pids_on_port:
                    await logger.adebug(
                        f"Found {len(pids_on_port)} untracked process(es) on port {port}: {pids_on_port}"
                    )
                    for pid in pids_on_port:
                        try:
                            await logger.adebug(f"Killing process {pid} on port {port}...")
                            # Use full path to taskkill on Windows to avoid PATH issues
                            taskkill_cmd = os.path.join(  # noqa: PTH118
                                os.environ.get("SYSTEMROOT", "C:\\Windows"), "System32", "taskkill.exe"
                            )
                            kill_result = await asyncio.to_thread(
                                subprocess.run,
                                [taskkill_cmd, "/F", "/PID", str(pid)],
                                capture_output=True,
                                check=False,
                            )
                            if kill_result.returncode == 0:
                                await logger.adebug(f"Successfully killed process {pid} on port {port}")
                                killed_any = True
                            else:
                                # stderr may be bytes or str depending on how the
                                # subprocess was invoked; normalize for logging.
                                stderr_output = (
                                    kill_result.stderr.decode()
                                    if isinstance(kill_result.stderr, bytes)
                                    else kill_result.stderr
                                )
                                await logger.awarning(f"Failed to kill process {pid} on port {port}: {stderr_output}")
                        except Exception as e:  # noqa: BLE001
                            await logger.adebug(f"Error killing process {pid}: {e}")

            # Also look for any orphaned mcp-composer processes (without checking port)
            # This catches processes that failed to bind but are still running
            # Use PowerShell instead of deprecated wmic.exe for Windows 10/11 compatibility
            try:
                # Use PowerShell to get Python processes with command line info
                # Build PowerShell command to find MCP Composer processes
                ps_filter = (
                    f"$_.Name -eq 'python.exe' -and $_.CommandLine -like '*mcp-composer*' "
                    f"-and ($_.CommandLine -like '*--port {port}*' -or $_.CommandLine -like '*--port={port}*')"
                )
                ps_cmd = (
                    f"Get-WmiObject Win32_Process | Where-Object {{ {ps_filter} }} | "
                    "Select-Object ProcessId,CommandLine | ConvertTo-Json"
                )
                powershell_cmd = ["powershell.exe", "-NoProfile", "-Command", ps_cmd]

                ps_result = await asyncio.to_thread(
                    subprocess.run,
                    powershell_cmd,
                    capture_output=True,
                    text=True,
                    check=False,
                    timeout=5,
                )

                if ps_result.returncode == 0 and ps_result.stdout.strip():
                    try:
                        # PowerShell may return single object or array
                        processes = json.loads(ps_result.stdout)
                        if isinstance(processes, dict):
                            processes = [processes]
                        elif not isinstance(processes, list):
                            processes = []

                        for proc in processes:
                            try:
                                pid = int(proc.get("ProcessId", 0))
                                if pid <= 0 or pid in self._pid_to_project:
                                    continue

                                await logger.adebug(
                                    f"Found orphaned MCP Composer process {pid} for port {port}, killing it"
                                )
                                # Use full path to taskkill on Windows to avoid PATH issues
                                taskkill_cmd = os.path.join(  # noqa: PTH118
                                    os.environ.get("SYSTEMROOT", "C:\\Windows"), "System32", "taskkill.exe"
                                )
                                kill_result = await asyncio.to_thread(
                                    subprocess.run,
                                    [taskkill_cmd, "/F", "/PID", str(pid)],
                                    capture_output=True,
                                    check=False,
                                )
                                if kill_result.returncode == 0:
                                    await logger.adebug(f"Successfully killed orphaned process {pid}")
                                    killed_any = True

                            except (ValueError, KeyError) as e:
                                await logger.adebug(f"Error processing PowerShell result: {e}")
                                continue

                    except json.JSONDecodeError as e:
                        await logger.adebug(f"Failed to parse PowerShell output: {e}")

            # NOTE(review): subprocess.run(timeout=...) raises
            # subprocess.TimeoutExpired, not asyncio.TimeoutError, so this
            # handler looks dead; timeouts land in the broad handler below.
            # Confirm and consider catching subprocess.TimeoutExpired here.
            except asyncio.TimeoutError:
                await logger.adebug("PowerShell command timed out while checking for orphaned processes")
            except Exception as e:  # noqa: BLE001
                await logger.adebug(f"Error using PowerShell to find orphaned processes: {e}")

            if killed_any:
                # Give Windows time to clean up
                await logger.adebug("Waiting 3 seconds for Windows to release port...")
                await asyncio.sleep(3)

            return killed_any  # noqa: TRY300

        except Exception as e:  # noqa: BLE001
            await logger.adebug(f"Error killing zombie processes: {e}")
            return False
|
|
432
|
+
|
|
433
|
+
def _is_port_used_by_another_project(self, port: int, current_project_id: str) -> tuple[bool, str | None]:
|
|
434
|
+
"""Check if a port is being used by another project.
|
|
435
|
+
|
|
436
|
+
Args:
|
|
437
|
+
port: The port to check
|
|
438
|
+
current_project_id: The current project ID
|
|
439
|
+
|
|
440
|
+
Returns:
|
|
441
|
+
Tuple of (is_used_by_other, other_project_id)
|
|
442
|
+
"""
|
|
443
|
+
other_project_id = self._port_to_project.get(port)
|
|
444
|
+
if other_project_id and other_project_id != current_project_id:
|
|
445
|
+
return True, other_project_id
|
|
446
|
+
return False, None
|
|
447
|
+
|
|
448
|
+
async def start(self):
|
|
449
|
+
"""Check if the MCP Composer service is enabled."""
|
|
450
|
+
settings = get_settings_service().settings
|
|
451
|
+
if not settings.mcp_composer_enabled:
|
|
452
|
+
await logger.adebug(
|
|
453
|
+
"MCP Composer is disabled in settings. OAuth authentication will not be enabled for MCP Servers."
|
|
454
|
+
)
|
|
455
|
+
else:
|
|
456
|
+
await logger.adebug(
|
|
457
|
+
"MCP Composer is enabled in settings. OAuth authentication will be enabled for MCP Servers."
|
|
458
|
+
)
|
|
459
|
+
|
|
460
|
+
async def stop(self):
|
|
461
|
+
"""Stop all MCP Composer instances."""
|
|
462
|
+
for project_id in list(self.project_composers.keys()):
|
|
463
|
+
await self.stop_project_composer(project_id)
|
|
464
|
+
await logger.adebug("All MCP Composer instances stopped")
|
|
465
|
+
|
|
466
|
+
@require_composer_enabled
|
|
467
|
+
async def stop_project_composer(self, project_id: str):
|
|
468
|
+
"""Stop the MCP Composer instance for a specific project."""
|
|
469
|
+
if project_id not in self.project_composers:
|
|
470
|
+
return
|
|
471
|
+
|
|
472
|
+
# Use the same lock to ensure consistency
|
|
473
|
+
if project_id in self._start_locks:
|
|
474
|
+
async with self._start_locks[project_id]:
|
|
475
|
+
await self._do_stop_project_composer(project_id)
|
|
476
|
+
# Clean up the lock as well
|
|
477
|
+
del self._start_locks[project_id]
|
|
478
|
+
else:
|
|
479
|
+
# Fallback if no lock exists
|
|
480
|
+
await self._do_stop_project_composer(project_id)
|
|
481
|
+
|
|
482
|
+
    async def _do_stop_project_composer(self, project_id: str):
        """Internal method to stop a project composer.

        Terminates the tracked subprocess (escalating to SIGKILL after a
        grace period) and always clears port/PID/project tracking in the
        ``finally`` block, even when termination fails.
        """
        if project_id not in self.project_composers:
            return

        composer_info = self.project_composers[project_id]
        process = composer_info.get("process")

        try:
            if process:
                try:
                    # Check if process is still running before trying to terminate
                    if process.poll() is None:
                        await logger.adebug(f"Terminating MCP Composer process {process.pid} for project {project_id}")
                        process.terminate()

                        # Wait longer for graceful shutdown
                        try:
                            # process.wait() blocks, so run it off the event loop.
                            await asyncio.wait_for(asyncio.to_thread(process.wait), timeout=2.0)
                            await logger.adebug(f"MCP Composer for project {project_id} terminated gracefully")
                        except asyncio.TimeoutError:
                            await logger.aerror(
                                f"MCP Composer for project {project_id} did not terminate gracefully, force killing"
                            )
                            # Escalate to SIGKILL and reap the process.
                            await asyncio.to_thread(process.kill)
                            await asyncio.to_thread(process.wait)
                    else:
                        await logger.adebug(f"MCP Composer process for project {project_id} was already terminated")

                    await logger.adebug(f"MCP Composer stopped for project {project_id}")

                except ProcessLookupError:
                    # Process already terminated
                    await logger.adebug(f"MCP Composer process for project {project_id} was already terminated")
        except Exception as e:  # noqa: BLE001
            await logger.aerror(f"Error stopping MCP Composer for project {project_id}: {e}")
        finally:
            # Always clean up tracking, even if stopping failed
            port = composer_info.get("port")
            # Only release the port if this project is still its recorded owner.
            if port and self._port_to_project.get(port) == project_id:
                self._port_to_project.pop(port, None)
                await logger.adebug(f"Released port {port} from project {project_id}")

            # Clean up PID tracking
            if process and process.pid:
                self._pid_to_project.pop(process.pid, None)
                await logger.adebug(f"Released PID {process.pid} tracking for project {project_id}")

            # Remove from tracking
            self.project_composers.pop(project_id, None)
            await logger.adebug(f"Removed tracking for project {project_id}")
|
|
533
|
+
|
|
534
|
+
async def _wait_for_process_exit(self, process):
|
|
535
|
+
"""Wait for a process to exit."""
|
|
536
|
+
await asyncio.to_thread(process.wait)
|
|
537
|
+
|
|
538
|
+
async def _read_process_output_and_extract_error(
|
|
539
|
+
self,
|
|
540
|
+
process: subprocess.Popen,
|
|
541
|
+
oauth_server_url: str | None,
|
|
542
|
+
timeout: float = 2.0,
|
|
543
|
+
stdout_file=None,
|
|
544
|
+
stderr_file=None,
|
|
545
|
+
) -> tuple[str, str, str]:
|
|
546
|
+
"""Read process output and extract user-friendly error message.
|
|
547
|
+
|
|
548
|
+
Args:
|
|
549
|
+
process: The subprocess to read from
|
|
550
|
+
oauth_server_url: OAuth server URL for error messages
|
|
551
|
+
timeout: Timeout for reading output
|
|
552
|
+
stdout_file: Optional file handle for stdout (Windows)
|
|
553
|
+
stderr_file: Optional file handle for stderr (Windows)
|
|
554
|
+
|
|
555
|
+
Returns:
|
|
556
|
+
Tuple of (stdout, stderr, error_message)
|
|
557
|
+
"""
|
|
558
|
+
stdout_content = ""
|
|
559
|
+
stderr_content = ""
|
|
560
|
+
|
|
561
|
+
try:
|
|
562
|
+
# On Windows with temp files, read from files instead of pipes
|
|
563
|
+
if stdout_file and stderr_file:
|
|
564
|
+
# Close file handles to flush and allow reading
|
|
565
|
+
try:
|
|
566
|
+
stdout_file.close()
|
|
567
|
+
stderr_file.close()
|
|
568
|
+
except Exception as e: # noqa: BLE001
|
|
569
|
+
await logger.adebug(f"Error closing temp files: {e}")
|
|
570
|
+
|
|
571
|
+
# Read from temp files using asyncio.to_thread
|
|
572
|
+
try:
|
|
573
|
+
|
|
574
|
+
def read_file(filepath):
|
|
575
|
+
return Path(filepath).read_bytes()
|
|
576
|
+
|
|
577
|
+
stdout_bytes = await asyncio.to_thread(read_file, stdout_file.name)
|
|
578
|
+
stdout_content = stdout_bytes.decode("utf-8", errors="replace") if stdout_bytes else ""
|
|
579
|
+
except Exception as e: # noqa: BLE001
|
|
580
|
+
await logger.adebug(f"Error reading stdout file: {e}")
|
|
581
|
+
|
|
582
|
+
try:
|
|
583
|
+
|
|
584
|
+
def read_file(filepath):
|
|
585
|
+
return Path(filepath).read_bytes()
|
|
586
|
+
|
|
587
|
+
stderr_bytes = await asyncio.to_thread(read_file, stderr_file.name)
|
|
588
|
+
stderr_content = stderr_bytes.decode("utf-8", errors="replace") if stderr_bytes else ""
|
|
589
|
+
except Exception as e: # noqa: BLE001
|
|
590
|
+
await logger.adebug(f"Error reading stderr file: {e}")
|
|
591
|
+
|
|
592
|
+
# Clean up temp files
|
|
593
|
+
try:
|
|
594
|
+
Path(stdout_file.name).unlink()
|
|
595
|
+
Path(stderr_file.name).unlink()
|
|
596
|
+
except Exception as e: # noqa: BLE001
|
|
597
|
+
await logger.adebug(f"Error removing temp files: {e}")
|
|
598
|
+
else:
|
|
599
|
+
# Use asyncio.to_thread to avoid blocking the event loop
|
|
600
|
+
# Process returns bytes, decode with error handling
|
|
601
|
+
stdout_bytes, stderr_bytes = await asyncio.to_thread(process.communicate, timeout=timeout)
|
|
602
|
+
stdout_content = stdout_bytes.decode("utf-8", errors="replace") if stdout_bytes else ""
|
|
603
|
+
stderr_content = stderr_bytes.decode("utf-8", errors="replace") if stderr_bytes else ""
|
|
604
|
+
|
|
605
|
+
except subprocess.TimeoutExpired:
|
|
606
|
+
process.kill()
|
|
607
|
+
error_msg = self._extract_error_message("", "", oauth_server_url)
|
|
608
|
+
return "", "", error_msg
|
|
609
|
+
|
|
610
|
+
error_msg = self._extract_error_message(stdout_content, stderr_content, oauth_server_url)
|
|
611
|
+
return stdout_content, stderr_content, error_msg
|
|
612
|
+
|
|
613
|
+
async def _read_stream_non_blocking(self, stream, stream_name: str) -> str:
|
|
614
|
+
"""Read from a stream without blocking and log the content.
|
|
615
|
+
|
|
616
|
+
Args:
|
|
617
|
+
stream: The stream to read from (stdout or stderr)
|
|
618
|
+
stream_name: Name of the stream for logging ("stdout" or "stderr")
|
|
619
|
+
|
|
620
|
+
Returns:
|
|
621
|
+
The content read from the stream (empty string if nothing available)
|
|
622
|
+
"""
|
|
623
|
+
if not stream:
|
|
624
|
+
return ""
|
|
625
|
+
|
|
626
|
+
try:
|
|
627
|
+
# On Windows, select.select() doesn't work with pipes (only sockets)
|
|
628
|
+
# Use platform-specific approach
|
|
629
|
+
os_type = platform.system()
|
|
630
|
+
|
|
631
|
+
if os_type == "Windows":
|
|
632
|
+
# On Windows, select.select() doesn't work with pipes
|
|
633
|
+
# Skip stream reading during monitoring - output will be captured when process terminates
|
|
634
|
+
# This prevents blocking on peek() which can cause the monitoring loop to hang
|
|
635
|
+
return ""
|
|
636
|
+
# On Unix-like systems, use select
|
|
637
|
+
if select.select([stream], [], [], 0)[0]:
|
|
638
|
+
line_bytes = stream.readline()
|
|
639
|
+
if line_bytes:
|
|
640
|
+
# Decode bytes with error handling
|
|
641
|
+
line = line_bytes.decode("utf-8", errors="replace") if isinstance(line_bytes, bytes) else line_bytes
|
|
642
|
+
stripped = line.strip()
|
|
643
|
+
if stripped:
|
|
644
|
+
# Log errors at error level, everything else at debug
|
|
645
|
+
if stream_name == "stderr" and ("ERROR" in stripped or "error" in stripped):
|
|
646
|
+
await logger.aerror(f"MCP Composer {stream_name}: {stripped}")
|
|
647
|
+
else:
|
|
648
|
+
await logger.adebug(f"MCP Composer {stream_name}: {stripped}")
|
|
649
|
+
return stripped
|
|
650
|
+
except Exception as e: # noqa: BLE001
|
|
651
|
+
await logger.adebug(f"Error reading {stream_name}: {e}")
|
|
652
|
+
return ""
|
|
653
|
+
|
|
654
|
+
    async def _ensure_port_available(self, port: int, current_project_id: str) -> None:
        """Ensure a port is available, only killing untracked processes.

        Active composers owned by other projects and unknown external
        processes are never killed; only a process this same project left
        behind (e.g. stuck in a startup loop) may be killed and retried.

        Args:
            port: The port number to ensure is available
            current_project_id: The project ID requesting the port

        Raises:
            MCPComposerPortError: If port cannot be made available
            MCPComposerConfigError: If port is invalid
        """
        try:
            is_port_available = self._is_port_available(port)
            await logger.adebug(f"Port {port} availability check: {is_port_available}")
        except (ValueError, OverflowError, TypeError) as e:
            # Port validation failed - invalid port number or type
            # ValueError: from our validation
            # OverflowError: from socket.bind() when port > 65535
            # TypeError: when port is not an integer
            error_msg = f"Invalid port number: {port}. Port must be an integer between 0 and 65535."
            await logger.aerror(f"Invalid port for project {current_project_id}: {e}")
            raise MCPComposerConfigError(error_msg, current_project_id) from e

        if not is_port_available:
            # Check if the port is being used by a tracked project
            is_used_by_other, other_project_id = self._is_port_used_by_another_project(port, current_project_id)

            if is_used_by_other and other_project_id:
                # Port is being used by another tracked project
                # Check if we can take ownership (e.g., the other project is failing)
                other_composer = self.project_composers.get(other_project_id)
                if other_composer and other_composer.get("process"):
                    other_process = other_composer["process"]
                    # If the other process is still running and healthy, don't kill it
                    if other_process.poll() is None:
                        await logger.aerror(
                            f"Port {port} requested by project {current_project_id} is already in use by "
                            f"project {other_project_id}. Will not kill active MCP Composer process."
                        )
                        port_error_msg = (
                            f"Port {port} is already in use by another project. "
                            f"Please choose a different port (e.g., {port + 1}) "
                            f"or disable OAuth on the other project first."
                        )
                        raise MCPComposerPortError(port_error_msg, current_project_id)

                # Process died but port tracking wasn't cleaned up - allow takeover
                await logger.adebug(
                    f"Port {port} was tracked to project {other_project_id} but process died. "
                    f"Allowing project {current_project_id} to take ownership."
                )
                # Clean up the old tracking
                # NOTE(review): this also removes the port->project mapping, so
                # the ownership check below sees no owner and the flow appears
                # to fall into the "unknown process" branch and raise - confirm
                # the intended takeover behavior.
                await self._do_stop_project_composer(other_project_id)

            # Check if port is used by a process owned by the current project (e.g., stuck in startup loop)
            port_owner_project = self._port_to_project.get(port)
            if port_owner_project == current_project_id:
                # Port is owned by current project - safe to kill
                await logger.adebug(
                    f"Port {port} is in use by current project {current_project_id} (likely stuck in startup). "
                    f"Killing process to retry."
                )
                killed = await self._kill_process_on_port(port)
                if killed:
                    await logger.adebug(
                        f"Successfully killed own process on port {port}. Waiting for port to be released..."
                    )
                    # Give the OS a moment to release the socket before re-probing.
                    await asyncio.sleep(2)
                    is_port_available = self._is_port_available(port)
                    if not is_port_available:
                        await logger.aerror(f"Port {port} is still in use after killing own process.")
                        port_error_msg = f"Port {port} is still in use after killing process"
                        raise MCPComposerPortError(port_error_msg)
            else:
                # Port is in use by unknown process - don't kill it (security concern)
                await logger.aerror(
                    f"Port {port} is in use by an unknown process (not owned by Langflow). "
                    f"Will not kill external application for security reasons."
                )
                port_error_msg = (
                    f"Port {port} is already in use by another application. "
                    f"Please choose a different port (e.g., {port + 1}) or free up the port manually."
                )
                raise MCPComposerPortError(port_error_msg, current_project_id)

        await logger.adebug(f"Port {port} is available, proceeding with MCP Composer startup")
|
|
740
|
+
|
|
741
|
+
async def _log_startup_error_details(
|
|
742
|
+
self,
|
|
743
|
+
project_id: str,
|
|
744
|
+
cmd: list[str],
|
|
745
|
+
host: str,
|
|
746
|
+
port: int,
|
|
747
|
+
stdout: str = "",
|
|
748
|
+
stderr: str = "",
|
|
749
|
+
error_msg: str = "",
|
|
750
|
+
exit_code: int | None = None,
|
|
751
|
+
pid: int | None = None,
|
|
752
|
+
) -> None:
|
|
753
|
+
"""Log detailed startup error information.
|
|
754
|
+
|
|
755
|
+
Args:
|
|
756
|
+
project_id: The project ID
|
|
757
|
+
cmd: The command that was executed
|
|
758
|
+
host: Target host
|
|
759
|
+
port: Target port
|
|
760
|
+
stdout: Standard output from the process
|
|
761
|
+
stderr: Standard error from the process
|
|
762
|
+
error_msg: User-friendly error message
|
|
763
|
+
exit_code: Process exit code (if terminated)
|
|
764
|
+
pid: Process ID (if still running)
|
|
765
|
+
"""
|
|
766
|
+
await logger.aerror(f"MCP Composer startup failed for project {project_id}:")
|
|
767
|
+
if exit_code is not None:
|
|
768
|
+
await logger.aerror(f" - Process died with exit code: {exit_code}")
|
|
769
|
+
if pid is not None:
|
|
770
|
+
await logger.aerror(f" - Process is running (PID: {pid}) but failed to bind to port {port}")
|
|
771
|
+
await logger.aerror(f" - Target: {host}:{port}")
|
|
772
|
+
|
|
773
|
+
# Obfuscate secrets in command before logging
|
|
774
|
+
safe_cmd = self._obfuscate_command_secrets(cmd)
|
|
775
|
+
await logger.aerror(f" - Command: {' '.join(safe_cmd)}")
|
|
776
|
+
|
|
777
|
+
if stderr.strip():
|
|
778
|
+
await logger.aerror(f" - Error output: {stderr.strip()}")
|
|
779
|
+
if stdout.strip():
|
|
780
|
+
await logger.aerror(f" - Standard output: {stdout.strip()}")
|
|
781
|
+
if error_msg:
|
|
782
|
+
await logger.aerror(f" - Error message: {error_msg}")
|
|
783
|
+
|
|
784
|
+
def _validate_oauth_settings(self, auth_config: dict[str, Any]) -> None:
|
|
785
|
+
"""Validate that all required OAuth settings are present and non-empty.
|
|
786
|
+
|
|
787
|
+
Raises:
|
|
788
|
+
MCPComposerConfigError: If any required OAuth field is missing or empty
|
|
789
|
+
"""
|
|
790
|
+
if auth_config.get("auth_type") != "oauth":
|
|
791
|
+
return
|
|
792
|
+
|
|
793
|
+
required_fields = [
|
|
794
|
+
"oauth_host",
|
|
795
|
+
"oauth_port",
|
|
796
|
+
"oauth_server_url",
|
|
797
|
+
"oauth_auth_url",
|
|
798
|
+
"oauth_token_url",
|
|
799
|
+
"oauth_client_id",
|
|
800
|
+
"oauth_client_secret",
|
|
801
|
+
]
|
|
802
|
+
|
|
803
|
+
missing_fields = []
|
|
804
|
+
empty_fields = []
|
|
805
|
+
|
|
806
|
+
for field in required_fields:
|
|
807
|
+
value = auth_config.get(field)
|
|
808
|
+
if value is None:
|
|
809
|
+
missing_fields.append(field)
|
|
810
|
+
elif not str(value).strip():
|
|
811
|
+
empty_fields.append(field)
|
|
812
|
+
|
|
813
|
+
error_parts = []
|
|
814
|
+
if missing_fields:
|
|
815
|
+
error_parts.append(f"Missing required fields: {', '.join(missing_fields)}")
|
|
816
|
+
if empty_fields:
|
|
817
|
+
error_parts.append(f"Empty required fields: {', '.join(empty_fields)}")
|
|
818
|
+
|
|
819
|
+
if error_parts:
|
|
820
|
+
config_error_msg = f"Invalid OAuth configuration: {'; '.join(error_parts)}"
|
|
821
|
+
raise MCPComposerConfigError(config_error_msg)
|
|
822
|
+
|
|
823
|
+
@staticmethod
|
|
824
|
+
def _normalize_config_value(value: Any) -> Any:
|
|
825
|
+
"""Normalize a config value (None or empty string becomes None).
|
|
826
|
+
|
|
827
|
+
Args:
|
|
828
|
+
value: The value to normalize
|
|
829
|
+
|
|
830
|
+
Returns:
|
|
831
|
+
None if value is None or empty string, otherwise the value
|
|
832
|
+
"""
|
|
833
|
+
return None if (value is None or value == "") else value
|
|
834
|
+
|
|
835
|
+
def _has_auth_config_changed(self, existing_auth: dict[str, Any] | None, new_auth: dict[str, Any] | None) -> bool:
|
|
836
|
+
"""Check if auth configuration has changed in a way that requires restart."""
|
|
837
|
+
if not existing_auth and not new_auth:
|
|
838
|
+
return False
|
|
839
|
+
|
|
840
|
+
if not existing_auth or not new_auth:
|
|
841
|
+
return True
|
|
842
|
+
|
|
843
|
+
auth_type = new_auth.get("auth_type", "")
|
|
844
|
+
|
|
845
|
+
# Auth type changed?
|
|
846
|
+
if existing_auth.get("auth_type") != auth_type:
|
|
847
|
+
return True
|
|
848
|
+
|
|
849
|
+
# Define which fields to check for each auth type
|
|
850
|
+
fields_to_check = []
|
|
851
|
+
if auth_type == "oauth":
|
|
852
|
+
# Get all oauth_* fields plus host/port from both configs
|
|
853
|
+
all_keys = set(existing_auth.keys()) | set(new_auth.keys())
|
|
854
|
+
fields_to_check = [k for k in all_keys if k.startswith("oauth_") or k in ["host", "port"]]
|
|
855
|
+
elif auth_type == "apikey":
|
|
856
|
+
fields_to_check = ["api_key"]
|
|
857
|
+
|
|
858
|
+
# Compare relevant fields
|
|
859
|
+
for field in fields_to_check:
|
|
860
|
+
old_normalized = self._normalize_config_value(existing_auth.get(field))
|
|
861
|
+
new_normalized = self._normalize_config_value(new_auth.get(field))
|
|
862
|
+
|
|
863
|
+
if old_normalized != new_normalized:
|
|
864
|
+
return True
|
|
865
|
+
|
|
866
|
+
return False
|
|
867
|
+
|
|
868
|
+
def _obfuscate_command_secrets(self, cmd: list[str]) -> list[str]:
|
|
869
|
+
"""Obfuscate secrets in command arguments for safe logging.
|
|
870
|
+
|
|
871
|
+
Args:
|
|
872
|
+
cmd: List of command arguments
|
|
873
|
+
|
|
874
|
+
Returns:
|
|
875
|
+
List of command arguments with secrets replaced with ***REDACTED***
|
|
876
|
+
"""
|
|
877
|
+
safe_cmd = []
|
|
878
|
+
i = 0
|
|
879
|
+
|
|
880
|
+
while i < len(cmd):
|
|
881
|
+
arg = cmd[i]
|
|
882
|
+
|
|
883
|
+
# Check if this is --env followed by a secret key
|
|
884
|
+
if arg == "--env" and i + 2 < len(cmd):
|
|
885
|
+
env_key = cmd[i + 1]
|
|
886
|
+
env_value = cmd[i + 2]
|
|
887
|
+
|
|
888
|
+
if any(secret in env_key.lower() for secret in ["secret", "key", "token"]):
|
|
889
|
+
# Redact the value
|
|
890
|
+
safe_cmd.extend([arg, env_key, "***REDACTED***"])
|
|
891
|
+
i += 3 # Skip all three: --env, key, and value
|
|
892
|
+
continue
|
|
893
|
+
|
|
894
|
+
# Not a secret, keep as-is
|
|
895
|
+
safe_cmd.extend([arg, env_key, env_value])
|
|
896
|
+
i += 3
|
|
897
|
+
continue
|
|
898
|
+
|
|
899
|
+
# Regular argument
|
|
900
|
+
safe_cmd.append(arg)
|
|
901
|
+
i += 1
|
|
902
|
+
|
|
903
|
+
return safe_cmd
|
|
904
|
+
|
|
905
|
+
def _extract_error_message(
|
|
906
|
+
self, stdout_content: str, stderr_content: str, oauth_server_url: str | None = None
|
|
907
|
+
) -> str:
|
|
908
|
+
"""Attempts to extract a user-friendly error message from subprocess output.
|
|
909
|
+
|
|
910
|
+
Args:
|
|
911
|
+
stdout_content: Standard output from the subprocess
|
|
912
|
+
stderr_content: Standard error from the subprocess
|
|
913
|
+
oauth_server_url: OAuth server URL
|
|
914
|
+
|
|
915
|
+
Returns:
|
|
916
|
+
User-friendly error message or a generic message if no specific pattern is found
|
|
917
|
+
"""
|
|
918
|
+
# Combine both outputs and clean them up
|
|
919
|
+
combined_output = (stderr_content + "\n" + stdout_content).strip()
|
|
920
|
+
if not oauth_server_url:
|
|
921
|
+
oauth_server_url = "OAuth server URL"
|
|
922
|
+
|
|
923
|
+
# Common error patterns with user-friendly messages
|
|
924
|
+
error_patterns = [
|
|
925
|
+
(r"address already in use", f"Address {oauth_server_url} is already in use."),
|
|
926
|
+
(r"permission denied", f"Permission denied starting MCP Composer on address {oauth_server_url}."),
|
|
927
|
+
(
|
|
928
|
+
r"connection refused",
|
|
929
|
+
f"Connection refused on address {oauth_server_url}. The address may be blocked or unavailable.",
|
|
930
|
+
),
|
|
931
|
+
(
|
|
932
|
+
r"bind.*failed",
|
|
933
|
+
f"Failed to bind to address {oauth_server_url}. The address may be in use or unavailable.",
|
|
934
|
+
),
|
|
935
|
+
(r"timeout", "MCP Composer startup timed out. Please try again."),
|
|
936
|
+
(r"invalid.*configuration", "Invalid MCP Composer configuration. Please check your settings."),
|
|
937
|
+
(r"oauth.*error", "OAuth configuration error. Please check your OAuth settings."),
|
|
938
|
+
(r"authentication.*failed", "Authentication failed. Please check your credentials."),
|
|
939
|
+
]
|
|
940
|
+
|
|
941
|
+
# Check for specific error patterns first
|
|
942
|
+
for pattern, friendly_msg in error_patterns:
|
|
943
|
+
if re.search(pattern, combined_output, re.IGNORECASE):
|
|
944
|
+
return friendly_msg
|
|
945
|
+
|
|
946
|
+
return GENERIC_STARTUP_ERROR_MSG
|
|
947
|
+
|
|
948
|
+
    @require_composer_enabled
    async def start_project_composer(
        self,
        project_id: str,
        sse_url: str,
        auth_config: dict[str, Any] | None,
        max_retries: int = 3,
        max_startup_checks: int = 40,
        startup_delay: float = 2.0,
    ) -> None:
        """Start an MCP Composer instance for a specific project.

        Public entry point: cancels any in-flight start for the same project,
        registers the current asyncio task for future cancellation, then
        delegates the actual work to ``_do_start_project_composer``.

        Args:
            project_id: The project ID
            sse_url: The SSE URL to connect to
            auth_config: Authentication configuration
            max_retries: Maximum number of retry attempts (default: 3)
            max_startup_checks: Number of checks per retry attempt (default: 40)
            startup_delay: Delay between checks in seconds (default: 2.0)

        Raises:
            MCPComposerError: Various specific errors if startup fails
        """
        # Cancel any active start operation for this project so only the most
        # recent caller's configuration wins.
        if project_id in self._active_start_tasks:
            active_task = self._active_start_tasks[project_id]
            if not active_task.done():
                await logger.adebug(f"Cancelling previous MCP Composer start operation for project {project_id}")
                active_task.cancel()
                try:
                    # Await the cancelled task so its cleanup (process teardown)
                    # completes before we begin a new start.
                    await active_task
                except asyncio.CancelledError:
                    await logger.adebug(f"Previous start operation for project {project_id} cancelled successfully")
                finally:
                    # Clean up the old tracking entry; it is replaced below.
                    del self._active_start_tasks[project_id]

        # Create and track the current task so a later call can cancel us.
        current_task = asyncio.current_task()
        if not current_task:
            # NOTE(review): current_task() returns None only when called outside
            # a running task; cancellation bookkeeping is skipped in that case.
            await logger.awarning(
                f"Could not get current task for project {project_id}. "
                f"Concurrent start operations may not be properly cancelled."
            )
        else:
            self._active_start_tasks[project_id] = current_task

        try:
            await self._do_start_project_composer(
                project_id, sse_url, auth_config, max_retries, max_startup_checks, startup_delay
            )
        finally:
            # Clean up the task reference when done — but only if it is still
            # ours (a newer start may have already replaced it).
            if project_id in self._active_start_tasks and self._active_start_tasks[project_id] == current_task:
                del self._active_start_tasks[project_id]
|
|
1004
|
+
    async def _do_start_project_composer(
        self,
        project_id: str,
        sse_url: str,
        auth_config: dict[str, Any] | None,
        max_retries: int = 3,
        max_startup_checks: int = 40,
        startup_delay: float = 2.0,
    ) -> None:
        """Internal method to start an MCP Composer instance.

        Validates config, serializes startups per project with a lock, reuses an
        already-running composer when the config is unchanged, and retries the
        subprocess launch up to ``max_retries`` times.

        Args:
            project_id: The project ID
            sse_url: The SSE URL to connect to
            auth_config: Authentication configuration
            max_retries: Maximum number of retry attempts (default: 3)
            max_startup_checks: Number of checks per retry attempt (default: 40)
            startup_delay: Delay between checks in seconds (default: 2.0)

        Raises:
            MCPComposerError: Various specific errors if startup fails
        """
        if not auth_config:
            no_auth_error_msg = "No auth settings provided"
            raise MCPComposerConfigError(no_auth_error_msg, project_id)

        # Validate OAuth settings early to provide clear error messages
        self._validate_oauth_settings(auth_config)

        # Pre-lock values are only used for this debug line; the authoritative
        # host/port are re-derived (and validated) inside the lock below.
        project_host = auth_config.get("oauth_host") if auth_config else "unknown"
        project_port = auth_config.get("oauth_port") if auth_config else "unknown"
        await logger.adebug(f"Starting MCP Composer for project {project_id} on {project_host}:{project_port}")

        # Use a per-project lock to prevent race conditions
        if project_id not in self._start_locks:
            self._start_locks[project_id] = asyncio.Lock()

        async with self._start_locks[project_id]:
            # Check if already running (double-check after acquiring lock)
            project_port_str = auth_config.get("oauth_port")
            if not project_port_str:
                no_port_error_msg = "No OAuth port provided"
                raise MCPComposerConfigError(no_port_error_msg, project_id)

            try:
                project_port = int(project_port_str)
            except (ValueError, TypeError) as e:
                port_error_msg = f"Invalid OAuth port: {project_port_str}"
                raise MCPComposerConfigError(port_error_msg, project_id) from e

            project_host = auth_config.get("oauth_host")
            if not project_host:
                no_host_error_msg = "No OAuth host provided"
                raise MCPComposerConfigError(no_host_error_msg, project_id)

            if project_id in self.project_composers:
                composer_info = self.project_composers[project_id]
                process = composer_info.get("process")
                existing_auth = composer_info.get("auth_config", {})
                existing_port = composer_info.get("port")

                # Check if process is still running (poll() is None while alive)
                if process and process.poll() is None:
                    # Process is running - only restart if config changed
                    auth_changed = self._has_auth_config_changed(existing_auth, auth_config)

                    if auth_changed:
                        await logger.adebug(f"Config changed for project {project_id}, restarting MCP Composer")
                        await self._do_stop_project_composer(project_id)
                    else:
                        await logger.adebug(
                            f"MCP Composer already running for project {project_id} with current config"
                        )
                        return  # Already running with correct config
                else:
                    # Process died or never started properly, restart it
                    await logger.adebug(f"MCP Composer process died for project {project_id}, restarting")
                    await self._do_stop_project_composer(project_id)
                    # Also kill any process that might be using the old port
                    if existing_port:
                        try:
                            await asyncio.wait_for(self._kill_process_on_port(existing_port), timeout=5.0)
                        except asyncio.TimeoutError:
                            await logger.aerror(f"Timeout while killing process on port {existing_port}")

            # Retry loop: try starting the process multiple times
            last_error = None
            try:
                # Before first attempt, try to kill any zombie MCP Composer processes
                # This is a best-effort operation - don't fail startup if it errors
                try:
                    await logger.adebug(
                        f"Checking for zombie MCP Composer processes on port {project_port} before startup..."
                    )
                    zombies_killed = await self._kill_zombie_mcp_processes(project_port)
                    if zombies_killed:
                        await logger.adebug(f"Killed zombie processes, port {project_port} should now be free")
                except Exception as zombie_error:  # noqa: BLE001
                    # Log but continue - zombie cleanup is optional
                    await logger.awarning(
                        f"Failed to check/kill zombie processes (non-fatal): {zombie_error}. Continuing with startup..."
                    )

                # Ensure port is available (only kill untracked processes)
                try:
                    await self._ensure_port_available(project_port, project_id)
                except (MCPComposerPortError, MCPComposerConfigError) as e:
                    # Port/config error before starting - store and raise immediately (no retries)
                    self._last_errors[project_id] = e.message
                    raise
                for retry_attempt in range(1, max_retries + 1):
                    try:
                        await logger.adebug(
                            f"Starting MCP Composer for project {project_id} (attempt {retry_attempt}/{max_retries})"
                        )

                        # Re-check port availability before each attempt to prevent race conditions
                        if retry_attempt > 1:
                            await logger.adebug(f"Re-checking port {project_port} availability before retry...")
                            await self._ensure_port_available(project_port, project_id)

                        process = await self._start_project_composer_process(
                            project_id,
                            project_host,
                            project_port,
                            sse_url,
                            auth_config,
                            max_startup_checks,
                            startup_delay,
                        )

                    except MCPComposerError as e:
                        last_error = e
                        await logger.aerror(
                            f"MCP Composer startup attempt {retry_attempt}/{max_retries} failed "
                            f"for project {project_id}: {e.message}"
                        )

                        # For config/port errors, don't retry - fail immediately
                        if isinstance(e, (MCPComposerConfigError, MCPComposerPortError)):
                            await logger.aerror(
                                f"Configuration or port error for project {project_id}, not retrying: {e.message}"
                            )
                            raise  # Re-raise to exit retry loop immediately

                        # Clean up any partially started process before retrying
                        if project_id in self.project_composers:
                            await self._do_stop_project_composer(project_id)

                        # If not the last attempt, wait and try to clean up zombie processes
                        if retry_attempt < max_retries:
                            await logger.adebug(f"Waiting 2 seconds before retry attempt {retry_attempt + 1}...")
                            await asyncio.sleep(2)

                            # On Windows, try to kill any zombie MCP Composer processes for this port
                            # This is a best-effort operation - don't fail retry if it errors
                            try:
                                msg = f"Checking for zombie MCP Composer processes on port {project_port}"
                                await logger.adebug(msg)
                                zombies_killed = await self._kill_zombie_mcp_processes(project_port)
                                if zombies_killed:
                                    await logger.adebug(f"Killed zombie processes, port {project_port} should be free")
                            except Exception as retry_zombie_error:  # noqa: BLE001
                                # Log but continue - zombie cleanup is optional
                                msg = f"Failed to check/kill zombie processes during retry: {retry_zombie_error}"
                                await logger.awarning(msg)

                    else:
                        # Success! Store the composer info and register the port and PID
                        # (the else branch only runs when the try body completed,
                        # so `process` is guaranteed to be bound here).
                        self.project_composers[project_id] = {
                            "process": process,
                            "host": project_host,
                            "port": project_port,
                            "sse_url": sse_url,
                            "auth_config": auth_config,
                        }
                        self._port_to_project[project_port] = project_id
                        self._pid_to_project[process.pid] = project_id
                        # Clear any previous error on success
                        self.clear_last_error(project_id)

                        await logger.adebug(
                            f"MCP Composer started for project {project_id} on port {project_port} "
                            f"(PID: {process.pid}) after {retry_attempt} attempt(s)"
                        )
                        return  # Success!

                # All retries failed, raise the last error
                if last_error:
                    await logger.aerror(
                        f"MCP Composer failed to start for project {project_id} after {max_retries} attempts"
                    )
                    # Store the error message for later retrieval
                    self._last_errors[project_id] = last_error.message
                    raise last_error

            except asyncio.CancelledError:
                # Operation was cancelled, clean up any started process
                await logger.adebug(f"MCP Composer start operation for project {project_id} was cancelled")
                if project_id in self.project_composers:
                    await self._do_stop_project_composer(project_id)
                raise  # Re-raise to propagate cancellation
|
|
1207
|
+
    async def _start_project_composer_process(
        self,
        project_id: str,
        host: str,
        port: int,
        sse_url: str,
        auth_config: dict[str, Any] | None = None,
        max_startup_checks: int = 40,
        startup_delay: float = 2.0,
    ) -> subprocess.Popen:
        """Start the MCP Composer subprocess for a specific project.

        Launches ``uvx mcp-composer`` with the configured args, then polls until
        either the process binds the target port (success) or dies / never binds
        (failure, raised as ``MCPComposerStartupError``).

        Args:
            project_id: The project ID
            host: Host to bind to
            port: Port to bind to
            sse_url: SSE URL to connect to
            auth_config: Authentication configuration
            max_startup_checks: Number of port binding checks (default: 40)
            startup_delay: Delay between checks in seconds (default: 2.0)

        Returns:
            The started subprocess

        Raises:
            MCPComposerStartupError: If startup fails
        """
        settings = get_settings_service().settings
        cmd = [
            "uvx",
            f"mcp-composer{settings.mcp_composer_version}",
            "--port",
            str(port),
            "--host",
            host,
            "--mode",
            "sse",
            "--sse-url",
            sse_url,
            "--disable-composer-tools",
        ]

        # Set environment variables (inherit the parent environment)
        env = os.environ.copy()

        oauth_server_url = auth_config.get("oauth_server_url") if auth_config else None
        if auth_config:
            auth_type = auth_config.get("auth_type")

            if auth_type == "oauth":
                cmd.extend(["--auth_type", "oauth"])

                # Add OAuth environment variables as command line arguments
                cmd.extend(["--env", "ENABLE_OAUTH", "True"])

                # Map auth config to environment variables for OAuth
                # Note: oauth_host and oauth_port are passed both via --host/--port CLI args
                # (for server binding) and as environment variables (for OAuth flow)
                oauth_env_mapping = {
                    "oauth_host": "OAUTH_HOST",
                    "oauth_port": "OAUTH_PORT",
                    "oauth_server_url": "OAUTH_SERVER_URL",
                    "oauth_callback_path": "OAUTH_CALLBACK_PATH",
                    "oauth_client_id": "OAUTH_CLIENT_ID",
                    "oauth_client_secret": "OAUTH_CLIENT_SECRET",  # pragma: allowlist secret
                    "oauth_auth_url": "OAUTH_AUTH_URL",
                    "oauth_token_url": "OAUTH_TOKEN_URL",
                    "oauth_mcp_scope": "OAUTH_MCP_SCOPE",
                    "oauth_provider_scope": "OAUTH_PROVIDER_SCOPE",
                }

                # Add environment variables as command line arguments
                # Only set non-empty values to avoid Pydantic validation errors
                for config_key, env_key in oauth_env_mapping.items():
                    value = auth_config.get(config_key)
                    if value is not None and str(value).strip():
                        cmd.extend(["--env", env_key, str(value)])

        # Log the command being executed (with secrets obfuscated)
        safe_cmd = self._obfuscate_command_secrets(cmd)
        await logger.adebug(f"Starting MCP Composer with command: {' '.join(safe_cmd)}")

        # Start the subprocess with both stdout and stderr captured
        # On Windows, use temp files to avoid pipe buffering issues that can cause process to hang
        stdout_handle: int | typing.IO[bytes] = subprocess.PIPE
        stderr_handle: int | typing.IO[bytes] = subprocess.PIPE
        stdout_file = None
        stderr_file = None

        if platform.system() == "Windows":
            # Create temp files for stdout/stderr on Windows to avoid pipe deadlocks
            # Note: We intentionally don't use context manager as we need files to persist
            # for the subprocess and be cleaned up manually later
            stdout_file = tempfile.NamedTemporaryFile(  # noqa: SIM115
                mode="w+b", delete=False, prefix=f"mcp_composer_{project_id}_stdout_", suffix=".log"
            )
            stderr_file = tempfile.NamedTemporaryFile(  # noqa: SIM115
                mode="w+b", delete=False, prefix=f"mcp_composer_{project_id}_stderr_", suffix=".log"
            )
            stdout_handle = stdout_file
            stderr_handle = stderr_file
            stdout_name = stdout_file.name
            stderr_name = stderr_file.name
            await logger.adebug(f"Using temp files for MCP Composer logs: stdout={stdout_name}, stderr={stderr_name}")

        process = subprocess.Popen(cmd, env=env, stdout=stdout_handle, stderr=stderr_handle)  # noqa: ASYNC220, S603

        # Monitor the process startup with multiple checks
        process_running = False
        port_bound = False

        await logger.adebug(
            f"MCP Composer process started with PID {process.pid}, monitoring startup for project {project_id}..."
        )

        try:
            for check in range(max_startup_checks):
                await asyncio.sleep(startup_delay)

                # Check if process is still running (None means still alive)
                poll_result = process.poll()

                startup_error_msg = None
                if poll_result is not None:
                    # Process terminated, get the error output
                    (
                        stdout_content,
                        stderr_content,
                        startup_error_msg,
                    ) = await self._read_process_output_and_extract_error(
                        process, oauth_server_url, stdout_file=stdout_file, stderr_file=stderr_file
                    )
                    await self._log_startup_error_details(
                        project_id, cmd, host, port, stdout_content, stderr_content, startup_error_msg, poll_result
                    )
                    raise MCPComposerStartupError(startup_error_msg, project_id)

                # Process is still running, check if port is bound
                # (port no longer "available" means the composer bound it).
                port_bound = not self._is_port_available(port)

                if port_bound:
                    await logger.adebug(
                        f"MCP Composer for project {project_id} bound to port {port} "
                        f"(check {check + 1}/{max_startup_checks})"
                    )
                    process_running = True
                    break
                await logger.adebug(
                    f"MCP Composer for project {project_id} not yet bound to port {port} "
                    f"(check {check + 1}/{max_startup_checks})"
                )

                # Try to read any available stderr/stdout without blocking to see what's happening
                # NOTE(review): on non-Windows these are PIPEs; this drain also
                # helps keep the pipe buffers from filling during a slow start.
                await self._read_stream_non_blocking(process.stderr, "stderr")
                await self._read_stream_non_blocking(process.stdout, "stdout")

        except asyncio.CancelledError:
            # Operation was cancelled, kill the process and cleanup
            await logger.adebug(
                f"MCP Composer process startup cancelled for project {project_id}, terminating process {process.pid}"
            )
            try:
                process.terminate()
                # Wait for graceful termination with timeout
                try:
                    await asyncio.wait_for(asyncio.to_thread(process.wait), timeout=2.0)
                except asyncio.TimeoutError:
                    # Force kill if graceful termination times out
                    await logger.adebug(f"Process {process.pid} did not terminate gracefully, force killing")
                    await asyncio.to_thread(process.kill)
                    await asyncio.to_thread(process.wait)
            except Exception as e:  # noqa: BLE001
                await logger.adebug(f"Error terminating process during cancellation: {e}")
            raise  # Re-raise to propagate cancellation

        # After all checks
        if not process_running or not port_bound:
            # Get comprehensive error information
            poll_result = process.poll()

            if poll_result is not None:
                # Process died
                stdout_content, stderr_content, startup_error_msg = await self._read_process_output_and_extract_error(
                    process, oauth_server_url, stdout_file=stdout_file, stderr_file=stderr_file
                )
                await self._log_startup_error_details(
                    project_id, cmd, host, port, stdout_content, stderr_content, startup_error_msg, poll_result
                )
                raise MCPComposerStartupError(startup_error_msg, project_id)
            # Process running but port not bound
            await logger.aerror(
                f" - Checked {max_startup_checks} times over {max_startup_checks * startup_delay} seconds"
            )

            # Get any available output before terminating
            # NOTE(review): temp files on Windows are presumably cleaned up by
            # _read_process_output_and_extract_error on these failure paths —
            # confirm, otherwise they leak on failure.
            process.terminate()
            stdout_content, stderr_content, startup_error_msg = await self._read_process_output_and_extract_error(
                process, oauth_server_url, stdout_file=stdout_file, stderr_file=stderr_file
            )
            await self._log_startup_error_details(
                project_id, cmd, host, port, stdout_content, stderr_content, startup_error_msg, pid=process.pid
            )
            raise MCPComposerStartupError(startup_error_msg, project_id)

        # Close the pipes/files if everything is successful
        if stdout_file and stderr_file:
            # Clean up temp files on success
            try:
                stdout_file.close()
                stderr_file.close()
                Path(stdout_file.name).unlink()
                Path(stderr_file.name).unlink()
            except Exception as e:  # noqa: BLE001
                await logger.adebug(f"Error cleaning up temp files on success: {e}")
        else:
            if process.stdout:
                process.stdout.close()
            if process.stderr:
                process.stderr.close()

        return process
|
|
1429
|
+
@require_composer_enabled
|
|
1430
|
+
def get_project_composer_port(self, project_id: str) -> int | None:
|
|
1431
|
+
"""Get the port number for a specific project's composer."""
|
|
1432
|
+
if project_id not in self.project_composers:
|
|
1433
|
+
return None
|
|
1434
|
+
return self.project_composers[project_id]["port"]
|
|
1435
|
+
|
|
1436
|
+
    @require_composer_enabled
    async def teardown(self) -> None:
        """Clean up resources when the service is torn down.

        Delegates the actual shutdown work to ``stop()``; this wrapper only adds
        debug logging around it.
        """
        await logger.adebug("Tearing down MCP Composer service...")
        # stop() is expected to shut down all managed composer processes — its
        # definition is outside this view.
        await self.stop()
        await logger.adebug("MCP Composer service teardown complete")
|