lfx_nightly-0.2.0.dev25-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of lfx-nightly might be problematic.
- lfx/__init__.py +0 -0
- lfx/__main__.py +25 -0
- lfx/_assets/component_index.json +1 -0
- lfx/base/__init__.py +0 -0
- lfx/base/agents/__init__.py +0 -0
- lfx/base/agents/agent.py +375 -0
- lfx/base/agents/altk_base_agent.py +380 -0
- lfx/base/agents/altk_tool_wrappers.py +565 -0
- lfx/base/agents/callback.py +130 -0
- lfx/base/agents/context.py +109 -0
- lfx/base/agents/crewai/__init__.py +0 -0
- lfx/base/agents/crewai/crew.py +231 -0
- lfx/base/agents/crewai/tasks.py +12 -0
- lfx/base/agents/default_prompts.py +23 -0
- lfx/base/agents/errors.py +15 -0
- lfx/base/agents/events.py +430 -0
- lfx/base/agents/utils.py +237 -0
- lfx/base/astra_assistants/__init__.py +0 -0
- lfx/base/astra_assistants/util.py +171 -0
- lfx/base/chains/__init__.py +0 -0
- lfx/base/chains/model.py +19 -0
- lfx/base/composio/__init__.py +0 -0
- lfx/base/composio/composio_base.py +2584 -0
- lfx/base/compressors/__init__.py +0 -0
- lfx/base/compressors/model.py +60 -0
- lfx/base/constants.py +46 -0
- lfx/base/curl/__init__.py +0 -0
- lfx/base/curl/parse.py +188 -0
- lfx/base/data/__init__.py +5 -0
- lfx/base/data/base_file.py +810 -0
- lfx/base/data/docling_utils.py +338 -0
- lfx/base/data/storage_utils.py +192 -0
- lfx/base/data/utils.py +362 -0
- lfx/base/datastax/__init__.py +5 -0
- lfx/base/datastax/astradb_base.py +896 -0
- lfx/base/document_transformers/__init__.py +0 -0
- lfx/base/document_transformers/model.py +43 -0
- lfx/base/embeddings/__init__.py +0 -0
- lfx/base/embeddings/aiml_embeddings.py +62 -0
- lfx/base/embeddings/embeddings_class.py +113 -0
- lfx/base/embeddings/model.py +26 -0
- lfx/base/flow_processing/__init__.py +0 -0
- lfx/base/flow_processing/utils.py +86 -0
- lfx/base/huggingface/__init__.py +0 -0
- lfx/base/huggingface/model_bridge.py +133 -0
- lfx/base/io/__init__.py +0 -0
- lfx/base/io/chat.py +21 -0
- lfx/base/io/text.py +22 -0
- lfx/base/knowledge_bases/__init__.py +3 -0
- lfx/base/knowledge_bases/knowledge_base_utils.py +137 -0
- lfx/base/langchain_utilities/__init__.py +0 -0
- lfx/base/langchain_utilities/model.py +35 -0
- lfx/base/langchain_utilities/spider_constants.py +1 -0
- lfx/base/langwatch/__init__.py +0 -0
- lfx/base/langwatch/utils.py +18 -0
- lfx/base/mcp/__init__.py +0 -0
- lfx/base/mcp/constants.py +2 -0
- lfx/base/mcp/util.py +1659 -0
- lfx/base/memory/__init__.py +0 -0
- lfx/base/memory/memory.py +49 -0
- lfx/base/memory/model.py +38 -0
- lfx/base/models/__init__.py +3 -0
- lfx/base/models/aiml_constants.py +51 -0
- lfx/base/models/anthropic_constants.py +51 -0
- lfx/base/models/aws_constants.py +151 -0
- lfx/base/models/chat_result.py +76 -0
- lfx/base/models/cometapi_constants.py +54 -0
- lfx/base/models/google_generative_ai_constants.py +70 -0
- lfx/base/models/google_generative_ai_model.py +38 -0
- lfx/base/models/groq_constants.py +150 -0
- lfx/base/models/groq_model_discovery.py +265 -0
- lfx/base/models/model.py +375 -0
- lfx/base/models/model_input_constants.py +378 -0
- lfx/base/models/model_metadata.py +41 -0
- lfx/base/models/model_utils.py +108 -0
- lfx/base/models/novita_constants.py +35 -0
- lfx/base/models/ollama_constants.py +52 -0
- lfx/base/models/openai_constants.py +129 -0
- lfx/base/models/sambanova_constants.py +18 -0
- lfx/base/models/watsonx_constants.py +36 -0
- lfx/base/processing/__init__.py +0 -0
- lfx/base/prompts/__init__.py +0 -0
- lfx/base/prompts/api_utils.py +224 -0
- lfx/base/prompts/utils.py +61 -0
- lfx/base/textsplitters/__init__.py +0 -0
- lfx/base/textsplitters/model.py +28 -0
- lfx/base/tools/__init__.py +0 -0
- lfx/base/tools/base.py +26 -0
- lfx/base/tools/component_tool.py +325 -0
- lfx/base/tools/constants.py +49 -0
- lfx/base/tools/flow_tool.py +132 -0
- lfx/base/tools/run_flow.py +698 -0
- lfx/base/vectorstores/__init__.py +0 -0
- lfx/base/vectorstores/model.py +193 -0
- lfx/base/vectorstores/utils.py +22 -0
- lfx/base/vectorstores/vector_store_connection_decorator.py +52 -0
- lfx/cli/__init__.py +5 -0
- lfx/cli/commands.py +327 -0
- lfx/cli/common.py +650 -0
- lfx/cli/run.py +506 -0
- lfx/cli/script_loader.py +289 -0
- lfx/cli/serve_app.py +546 -0
- lfx/cli/validation.py +69 -0
- lfx/components/FAISS/__init__.py +34 -0
- lfx/components/FAISS/faiss.py +111 -0
- lfx/components/Notion/__init__.py +19 -0
- lfx/components/Notion/add_content_to_page.py +269 -0
- lfx/components/Notion/create_page.py +94 -0
- lfx/components/Notion/list_database_properties.py +68 -0
- lfx/components/Notion/list_pages.py +122 -0
- lfx/components/Notion/list_users.py +77 -0
- lfx/components/Notion/page_content_viewer.py +93 -0
- lfx/components/Notion/search.py +111 -0
- lfx/components/Notion/update_page_property.py +114 -0
- lfx/components/__init__.py +428 -0
- lfx/components/_importing.py +42 -0
- lfx/components/agentql/__init__.py +3 -0
- lfx/components/agentql/agentql_api.py +151 -0
- lfx/components/aiml/__init__.py +37 -0
- lfx/components/aiml/aiml.py +115 -0
- lfx/components/aiml/aiml_embeddings.py +37 -0
- lfx/components/altk/__init__.py +34 -0
- lfx/components/altk/altk_agent.py +193 -0
- lfx/components/amazon/__init__.py +36 -0
- lfx/components/amazon/amazon_bedrock_converse.py +195 -0
- lfx/components/amazon/amazon_bedrock_embedding.py +109 -0
- lfx/components/amazon/amazon_bedrock_model.py +130 -0
- lfx/components/amazon/s3_bucket_uploader.py +211 -0
- lfx/components/anthropic/__init__.py +34 -0
- lfx/components/anthropic/anthropic.py +187 -0
- lfx/components/apify/__init__.py +5 -0
- lfx/components/apify/apify_actor.py +325 -0
- lfx/components/arxiv/__init__.py +3 -0
- lfx/components/arxiv/arxiv.py +169 -0
- lfx/components/assemblyai/__init__.py +46 -0
- lfx/components/assemblyai/assemblyai_get_subtitles.py +83 -0
- lfx/components/assemblyai/assemblyai_lemur.py +183 -0
- lfx/components/assemblyai/assemblyai_list_transcripts.py +95 -0
- lfx/components/assemblyai/assemblyai_poll_transcript.py +72 -0
- lfx/components/assemblyai/assemblyai_start_transcript.py +188 -0
- lfx/components/azure/__init__.py +37 -0
- lfx/components/azure/azure_openai.py +95 -0
- lfx/components/azure/azure_openai_embeddings.py +83 -0
- lfx/components/baidu/__init__.py +32 -0
- lfx/components/baidu/baidu_qianfan_chat.py +113 -0
- lfx/components/bing/__init__.py +3 -0
- lfx/components/bing/bing_search_api.py +61 -0
- lfx/components/cassandra/__init__.py +40 -0
- lfx/components/cassandra/cassandra.py +264 -0
- lfx/components/cassandra/cassandra_chat.py +92 -0
- lfx/components/cassandra/cassandra_graph.py +238 -0
- lfx/components/chains/__init__.py +3 -0
- lfx/components/chroma/__init__.py +34 -0
- lfx/components/chroma/chroma.py +169 -0
- lfx/components/cleanlab/__init__.py +40 -0
- lfx/components/cleanlab/cleanlab_evaluator.py +155 -0
- lfx/components/cleanlab/cleanlab_rag_evaluator.py +254 -0
- lfx/components/cleanlab/cleanlab_remediator.py +131 -0
- lfx/components/clickhouse/__init__.py +34 -0
- lfx/components/clickhouse/clickhouse.py +135 -0
- lfx/components/cloudflare/__init__.py +32 -0
- lfx/components/cloudflare/cloudflare.py +81 -0
- lfx/components/cohere/__init__.py +40 -0
- lfx/components/cohere/cohere_embeddings.py +81 -0
- lfx/components/cohere/cohere_models.py +46 -0
- lfx/components/cohere/cohere_rerank.py +51 -0
- lfx/components/cometapi/__init__.py +32 -0
- lfx/components/cometapi/cometapi.py +166 -0
- lfx/components/composio/__init__.py +222 -0
- lfx/components/composio/agentql_composio.py +11 -0
- lfx/components/composio/agiled_composio.py +11 -0
- lfx/components/composio/airtable_composio.py +11 -0
- lfx/components/composio/apollo_composio.py +11 -0
- lfx/components/composio/asana_composio.py +11 -0
- lfx/components/composio/attio_composio.py +11 -0
- lfx/components/composio/bitbucket_composio.py +11 -0
- lfx/components/composio/bolna_composio.py +11 -0
- lfx/components/composio/brightdata_composio.py +11 -0
- lfx/components/composio/calendly_composio.py +11 -0
- lfx/components/composio/canva_composio.py +11 -0
- lfx/components/composio/canvas_composio.py +11 -0
- lfx/components/composio/coda_composio.py +11 -0
- lfx/components/composio/composio_api.py +278 -0
- lfx/components/composio/contentful_composio.py +11 -0
- lfx/components/composio/digicert_composio.py +11 -0
- lfx/components/composio/discord_composio.py +11 -0
- lfx/components/composio/dropbox_compnent.py +11 -0
- lfx/components/composio/elevenlabs_composio.py +11 -0
- lfx/components/composio/exa_composio.py +11 -0
- lfx/components/composio/figma_composio.py +11 -0
- lfx/components/composio/finage_composio.py +11 -0
- lfx/components/composio/firecrawl_composio.py +11 -0
- lfx/components/composio/fireflies_composio.py +11 -0
- lfx/components/composio/fixer_composio.py +11 -0
- lfx/components/composio/flexisign_composio.py +11 -0
- lfx/components/composio/freshdesk_composio.py +11 -0
- lfx/components/composio/github_composio.py +11 -0
- lfx/components/composio/gmail_composio.py +38 -0
- lfx/components/composio/googlebigquery_composio.py +11 -0
- lfx/components/composio/googlecalendar_composio.py +11 -0
- lfx/components/composio/googleclassroom_composio.py +11 -0
- lfx/components/composio/googledocs_composio.py +11 -0
- lfx/components/composio/googlemeet_composio.py +11 -0
- lfx/components/composio/googlesheets_composio.py +11 -0
- lfx/components/composio/googletasks_composio.py +8 -0
- lfx/components/composio/heygen_composio.py +11 -0
- lfx/components/composio/instagram_composio.py +11 -0
- lfx/components/composio/jira_composio.py +11 -0
- lfx/components/composio/jotform_composio.py +11 -0
- lfx/components/composio/klaviyo_composio.py +11 -0
- lfx/components/composio/linear_composio.py +11 -0
- lfx/components/composio/listennotes_composio.py +11 -0
- lfx/components/composio/mem0_composio.py +11 -0
- lfx/components/composio/miro_composio.py +11 -0
- lfx/components/composio/missive_composio.py +11 -0
- lfx/components/composio/notion_composio.py +11 -0
- lfx/components/composio/onedrive_composio.py +11 -0
- lfx/components/composio/outlook_composio.py +11 -0
- lfx/components/composio/pandadoc_composio.py +11 -0
- lfx/components/composio/peopledatalabs_composio.py +11 -0
- lfx/components/composio/perplexityai_composio.py +11 -0
- lfx/components/composio/reddit_composio.py +11 -0
- lfx/components/composio/serpapi_composio.py +11 -0
- lfx/components/composio/slack_composio.py +11 -0
- lfx/components/composio/slackbot_composio.py +11 -0
- lfx/components/composio/snowflake_composio.py +11 -0
- lfx/components/composio/supabase_composio.py +11 -0
- lfx/components/composio/tavily_composio.py +11 -0
- lfx/components/composio/timelinesai_composio.py +11 -0
- lfx/components/composio/todoist_composio.py +11 -0
- lfx/components/composio/wrike_composio.py +11 -0
- lfx/components/composio/youtube_composio.py +11 -0
- lfx/components/confluence/__init__.py +3 -0
- lfx/components/confluence/confluence.py +84 -0
- lfx/components/couchbase/__init__.py +34 -0
- lfx/components/couchbase/couchbase.py +102 -0
- lfx/components/crewai/__init__.py +49 -0
- lfx/components/crewai/crewai.py +108 -0
- lfx/components/crewai/hierarchical_crew.py +47 -0
- lfx/components/crewai/hierarchical_task.py +45 -0
- lfx/components/crewai/sequential_crew.py +53 -0
- lfx/components/crewai/sequential_task.py +74 -0
- lfx/components/crewai/sequential_task_agent.py +144 -0
- lfx/components/cuga/__init__.py +34 -0
- lfx/components/cuga/cuga_agent.py +730 -0
- lfx/components/custom_component/__init__.py +34 -0
- lfx/components/custom_component/custom_component.py +31 -0
- lfx/components/data/__init__.py +114 -0
- lfx/components/data_source/__init__.py +58 -0
- lfx/components/data_source/api_request.py +577 -0
- lfx/components/data_source/csv_to_data.py +101 -0
- lfx/components/data_source/json_to_data.py +106 -0
- lfx/components/data_source/mock_data.py +398 -0
- lfx/components/data_source/news_search.py +166 -0
- lfx/components/data_source/rss.py +71 -0
- lfx/components/data_source/sql_executor.py +101 -0
- lfx/components/data_source/url.py +311 -0
- lfx/components/data_source/web_search.py +326 -0
- lfx/components/datastax/__init__.py +76 -0
- lfx/components/datastax/astradb_assistant_manager.py +307 -0
- lfx/components/datastax/astradb_chatmemory.py +40 -0
- lfx/components/datastax/astradb_cql.py +288 -0
- lfx/components/datastax/astradb_graph.py +217 -0
- lfx/components/datastax/astradb_tool.py +378 -0
- lfx/components/datastax/astradb_vectorize.py +122 -0
- lfx/components/datastax/astradb_vectorstore.py +449 -0
- lfx/components/datastax/create_assistant.py +59 -0
- lfx/components/datastax/create_thread.py +33 -0
- lfx/components/datastax/dotenv.py +36 -0
- lfx/components/datastax/get_assistant.py +38 -0
- lfx/components/datastax/getenvvar.py +31 -0
- lfx/components/datastax/graph_rag.py +141 -0
- lfx/components/datastax/hcd.py +315 -0
- lfx/components/datastax/list_assistants.py +26 -0
- lfx/components/datastax/run.py +90 -0
- lfx/components/deactivated/__init__.py +15 -0
- lfx/components/deactivated/amazon_kendra.py +66 -0
- lfx/components/deactivated/chat_litellm_model.py +158 -0
- lfx/components/deactivated/code_block_extractor.py +26 -0
- lfx/components/deactivated/documents_to_data.py +22 -0
- lfx/components/deactivated/embed.py +16 -0
- lfx/components/deactivated/extract_key_from_data.py +46 -0
- lfx/components/deactivated/json_document_builder.py +57 -0
- lfx/components/deactivated/list_flows.py +20 -0
- lfx/components/deactivated/mcp_sse.py +61 -0
- lfx/components/deactivated/mcp_stdio.py +62 -0
- lfx/components/deactivated/merge_data.py +93 -0
- lfx/components/deactivated/message.py +37 -0
- lfx/components/deactivated/metal.py +54 -0
- lfx/components/deactivated/multi_query.py +59 -0
- lfx/components/deactivated/retriever.py +43 -0
- lfx/components/deactivated/selective_passthrough.py +77 -0
- lfx/components/deactivated/should_run_next.py +40 -0
- lfx/components/deactivated/split_text.py +63 -0
- lfx/components/deactivated/store_message.py +24 -0
- lfx/components/deactivated/sub_flow.py +124 -0
- lfx/components/deactivated/vectara_self_query.py +76 -0
- lfx/components/deactivated/vector_store.py +24 -0
- lfx/components/deepseek/__init__.py +34 -0
- lfx/components/deepseek/deepseek.py +136 -0
- lfx/components/docling/__init__.py +43 -0
- lfx/components/docling/chunk_docling_document.py +186 -0
- lfx/components/docling/docling_inline.py +238 -0
- lfx/components/docling/docling_remote.py +195 -0
- lfx/components/docling/export_docling_document.py +117 -0
- lfx/components/documentloaders/__init__.py +3 -0
- lfx/components/duckduckgo/__init__.py +3 -0
- lfx/components/duckduckgo/duck_duck_go_search_run.py +92 -0
- lfx/components/elastic/__init__.py +37 -0
- lfx/components/elastic/elasticsearch.py +267 -0
- lfx/components/elastic/opensearch.py +789 -0
- lfx/components/elastic/opensearch_multimodal.py +1575 -0
- lfx/components/embeddings/__init__.py +37 -0
- lfx/components/embeddings/similarity.py +77 -0
- lfx/components/embeddings/text_embedder.py +65 -0
- lfx/components/exa/__init__.py +3 -0
- lfx/components/exa/exa_search.py +68 -0
- lfx/components/files_and_knowledge/__init__.py +47 -0
- lfx/components/files_and_knowledge/directory.py +113 -0
- lfx/components/files_and_knowledge/file.py +841 -0
- lfx/components/files_and_knowledge/ingestion.py +694 -0
- lfx/components/files_and_knowledge/retrieval.py +264 -0
- lfx/components/files_and_knowledge/save_file.py +746 -0
- lfx/components/firecrawl/__init__.py +43 -0
- lfx/components/firecrawl/firecrawl_crawl_api.py +88 -0
- lfx/components/firecrawl/firecrawl_extract_api.py +136 -0
- lfx/components/firecrawl/firecrawl_map_api.py +89 -0
- lfx/components/firecrawl/firecrawl_scrape_api.py +73 -0
- lfx/components/flow_controls/__init__.py +58 -0
- lfx/components/flow_controls/conditional_router.py +208 -0
- lfx/components/flow_controls/data_conditional_router.py +126 -0
- lfx/components/flow_controls/flow_tool.py +111 -0
- lfx/components/flow_controls/listen.py +29 -0
- lfx/components/flow_controls/loop.py +163 -0
- lfx/components/flow_controls/notify.py +88 -0
- lfx/components/flow_controls/pass_message.py +36 -0
- lfx/components/flow_controls/run_flow.py +108 -0
- lfx/components/flow_controls/sub_flow.py +115 -0
- lfx/components/git/__init__.py +4 -0
- lfx/components/git/git.py +262 -0
- lfx/components/git/gitextractor.py +196 -0
- lfx/components/glean/__init__.py +3 -0
- lfx/components/glean/glean_search_api.py +173 -0
- lfx/components/google/__init__.py +17 -0
- lfx/components/google/gmail.py +193 -0
- lfx/components/google/google_bq_sql_executor.py +157 -0
- lfx/components/google/google_drive.py +92 -0
- lfx/components/google/google_drive_search.py +152 -0
- lfx/components/google/google_generative_ai.py +144 -0
- lfx/components/google/google_generative_ai_embeddings.py +141 -0
- lfx/components/google/google_oauth_token.py +89 -0
- lfx/components/google/google_search_api_core.py +68 -0
- lfx/components/google/google_serper_api_core.py +74 -0
- lfx/components/groq/__init__.py +34 -0
- lfx/components/groq/groq.py +143 -0
- lfx/components/helpers/__init__.py +154 -0
- lfx/components/homeassistant/__init__.py +7 -0
- lfx/components/homeassistant/home_assistant_control.py +152 -0
- lfx/components/homeassistant/list_home_assistant_states.py +137 -0
- lfx/components/huggingface/__init__.py +37 -0
- lfx/components/huggingface/huggingface.py +199 -0
- lfx/components/huggingface/huggingface_inference_api.py +106 -0
- lfx/components/ibm/__init__.py +34 -0
- lfx/components/ibm/watsonx.py +207 -0
- lfx/components/ibm/watsonx_embeddings.py +135 -0
- lfx/components/icosacomputing/__init__.py +5 -0
- lfx/components/icosacomputing/combinatorial_reasoner.py +84 -0
- lfx/components/input_output/__init__.py +40 -0
- lfx/components/input_output/chat.py +109 -0
- lfx/components/input_output/chat_output.py +184 -0
- lfx/components/input_output/text.py +27 -0
- lfx/components/input_output/text_output.py +29 -0
- lfx/components/input_output/webhook.py +56 -0
- lfx/components/jigsawstack/__init__.py +23 -0
- lfx/components/jigsawstack/ai_scrape.py +126 -0
- lfx/components/jigsawstack/ai_web_search.py +136 -0
- lfx/components/jigsawstack/file_read.py +115 -0
- lfx/components/jigsawstack/file_upload.py +94 -0
- lfx/components/jigsawstack/image_generation.py +205 -0
- lfx/components/jigsawstack/nsfw.py +60 -0
- lfx/components/jigsawstack/object_detection.py +124 -0
- lfx/components/jigsawstack/sentiment.py +112 -0
- lfx/components/jigsawstack/text_to_sql.py +90 -0
- lfx/components/jigsawstack/text_translate.py +77 -0
- lfx/components/jigsawstack/vocr.py +107 -0
- lfx/components/knowledge_bases/__init__.py +89 -0
- lfx/components/langchain_utilities/__init__.py +109 -0
- lfx/components/langchain_utilities/character.py +53 -0
- lfx/components/langchain_utilities/conversation.py +59 -0
- lfx/components/langchain_utilities/csv_agent.py +175 -0
- lfx/components/langchain_utilities/fake_embeddings.py +26 -0
- lfx/components/langchain_utilities/html_link_extractor.py +35 -0
- lfx/components/langchain_utilities/json_agent.py +100 -0
- lfx/components/langchain_utilities/langchain_hub.py +126 -0
- lfx/components/langchain_utilities/language_recursive.py +49 -0
- lfx/components/langchain_utilities/language_semantic.py +138 -0
- lfx/components/langchain_utilities/llm_checker.py +39 -0
- lfx/components/langchain_utilities/llm_math.py +42 -0
- lfx/components/langchain_utilities/natural_language.py +61 -0
- lfx/components/langchain_utilities/openai_tools.py +53 -0
- lfx/components/langchain_utilities/openapi.py +48 -0
- lfx/components/langchain_utilities/recursive_character.py +60 -0
- lfx/components/langchain_utilities/retrieval_qa.py +83 -0
- lfx/components/langchain_utilities/runnable_executor.py +137 -0
- lfx/components/langchain_utilities/self_query.py +80 -0
- lfx/components/langchain_utilities/spider.py +142 -0
- lfx/components/langchain_utilities/sql.py +40 -0
- lfx/components/langchain_utilities/sql_database.py +35 -0
- lfx/components/langchain_utilities/sql_generator.py +78 -0
- lfx/components/langchain_utilities/tool_calling.py +59 -0
- lfx/components/langchain_utilities/vector_store_info.py +49 -0
- lfx/components/langchain_utilities/vector_store_router.py +33 -0
- lfx/components/langchain_utilities/xml_agent.py +71 -0
- lfx/components/langwatch/__init__.py +3 -0
- lfx/components/langwatch/langwatch.py +278 -0
- lfx/components/link_extractors/__init__.py +3 -0
- lfx/components/llm_operations/__init__.py +46 -0
- lfx/components/llm_operations/batch_run.py +205 -0
- lfx/components/llm_operations/lambda_filter.py +218 -0
- lfx/components/llm_operations/llm_conditional_router.py +421 -0
- lfx/components/llm_operations/llm_selector.py +499 -0
- lfx/components/llm_operations/structured_output.py +244 -0
- lfx/components/lmstudio/__init__.py +34 -0
- lfx/components/lmstudio/lmstudioembeddings.py +89 -0
- lfx/components/lmstudio/lmstudiomodel.py +133 -0
- lfx/components/logic/__init__.py +181 -0
- lfx/components/maritalk/__init__.py +32 -0
- lfx/components/maritalk/maritalk.py +52 -0
- lfx/components/mem0/__init__.py +3 -0
- lfx/components/mem0/mem0_chat_memory.py +147 -0
- lfx/components/milvus/__init__.py +34 -0
- lfx/components/milvus/milvus.py +115 -0
- lfx/components/mistral/__init__.py +37 -0
- lfx/components/mistral/mistral.py +114 -0
- lfx/components/mistral/mistral_embeddings.py +58 -0
- lfx/components/models/__init__.py +89 -0
- lfx/components/models_and_agents/__init__.py +49 -0
- lfx/components/models_and_agents/agent.py +644 -0
- lfx/components/models_and_agents/embedding_model.py +423 -0
- lfx/components/models_and_agents/language_model.py +398 -0
- lfx/components/models_and_agents/mcp_component.py +594 -0
- lfx/components/models_and_agents/memory.py +268 -0
- lfx/components/models_and_agents/prompt.py +67 -0
- lfx/components/mongodb/__init__.py +34 -0
- lfx/components/mongodb/mongodb_atlas.py +213 -0
- lfx/components/needle/__init__.py +3 -0
- lfx/components/needle/needle.py +104 -0
- lfx/components/notdiamond/__init__.py +34 -0
- lfx/components/notdiamond/notdiamond.py +228 -0
- lfx/components/novita/__init__.py +32 -0
- lfx/components/novita/novita.py +130 -0
- lfx/components/nvidia/__init__.py +57 -0
- lfx/components/nvidia/nvidia.py +151 -0
- lfx/components/nvidia/nvidia_embedding.py +77 -0
- lfx/components/nvidia/nvidia_ingest.py +317 -0
- lfx/components/nvidia/nvidia_rerank.py +63 -0
- lfx/components/nvidia/system_assist.py +65 -0
- lfx/components/olivya/__init__.py +3 -0
- lfx/components/olivya/olivya.py +116 -0
- lfx/components/ollama/__init__.py +37 -0
- lfx/components/ollama/ollama.py +548 -0
- lfx/components/ollama/ollama_embeddings.py +103 -0
- lfx/components/openai/__init__.py +37 -0
- lfx/components/openai/openai.py +100 -0
- lfx/components/openai/openai_chat_model.py +176 -0
- lfx/components/openrouter/__init__.py +32 -0
- lfx/components/openrouter/openrouter.py +104 -0
- lfx/components/output_parsers/__init__.py +3 -0
- lfx/components/perplexity/__init__.py +34 -0
- lfx/components/perplexity/perplexity.py +75 -0
- lfx/components/pgvector/__init__.py +34 -0
- lfx/components/pgvector/pgvector.py +72 -0
- lfx/components/pinecone/__init__.py +34 -0
- lfx/components/pinecone/pinecone.py +134 -0
- lfx/components/processing/__init__.py +72 -0
- lfx/components/processing/alter_metadata.py +109 -0
- lfx/components/processing/combine_text.py +40 -0
- lfx/components/processing/converter.py +248 -0
- lfx/components/processing/create_data.py +111 -0
- lfx/components/processing/create_list.py +40 -0
- lfx/components/processing/data_operations.py +528 -0
- lfx/components/processing/data_to_dataframe.py +71 -0
- lfx/components/processing/dataframe_operations.py +313 -0
- lfx/components/processing/dataframe_to_toolset.py +259 -0
- lfx/components/processing/dynamic_create_data.py +357 -0
- lfx/components/processing/extract_key.py +54 -0
- lfx/components/processing/filter_data.py +43 -0
- lfx/components/processing/filter_data_values.py +89 -0
- lfx/components/processing/json_cleaner.py +104 -0
- lfx/components/processing/merge_data.py +91 -0
- lfx/components/processing/message_to_data.py +37 -0
- lfx/components/processing/output_parser.py +46 -0
- lfx/components/processing/parse_data.py +71 -0
- lfx/components/processing/parse_dataframe.py +69 -0
- lfx/components/processing/parse_json_data.py +91 -0
- lfx/components/processing/parser.py +148 -0
- lfx/components/processing/regex.py +83 -0
- lfx/components/processing/select_data.py +49 -0
- lfx/components/processing/split_text.py +141 -0
- lfx/components/processing/store_message.py +91 -0
- lfx/components/processing/update_data.py +161 -0
- lfx/components/prototypes/__init__.py +35 -0
- lfx/components/prototypes/python_function.py +73 -0
- lfx/components/qdrant/__init__.py +34 -0
- lfx/components/qdrant/qdrant.py +109 -0
- lfx/components/redis/__init__.py +37 -0
- lfx/components/redis/redis.py +89 -0
- lfx/components/redis/redis_chat.py +43 -0
- lfx/components/sambanova/__init__.py +32 -0
- lfx/components/sambanova/sambanova.py +84 -0
- lfx/components/scrapegraph/__init__.py +40 -0
- lfx/components/scrapegraph/scrapegraph_markdownify_api.py +64 -0
- lfx/components/scrapegraph/scrapegraph_search_api.py +64 -0
- lfx/components/scrapegraph/scrapegraph_smart_scraper_api.py +71 -0
- lfx/components/searchapi/__init__.py +34 -0
- lfx/components/searchapi/search.py +79 -0
- lfx/components/serpapi/__init__.py +3 -0
- lfx/components/serpapi/serp.py +115 -0
- lfx/components/supabase/__init__.py +34 -0
- lfx/components/supabase/supabase.py +76 -0
- lfx/components/tavily/__init__.py +4 -0
- lfx/components/tavily/tavily_extract.py +117 -0
- lfx/components/tavily/tavily_search.py +212 -0
- lfx/components/textsplitters/__init__.py +3 -0
- lfx/components/toolkits/__init__.py +3 -0
- lfx/components/tools/__init__.py +66 -0
- lfx/components/tools/calculator.py +109 -0
- lfx/components/tools/google_search_api.py +45 -0
- lfx/components/tools/google_serper_api.py +115 -0
- lfx/components/tools/python_code_structured_tool.py +328 -0
- lfx/components/tools/python_repl.py +98 -0
- lfx/components/tools/search_api.py +88 -0
- lfx/components/tools/searxng.py +145 -0
- lfx/components/tools/serp_api.py +120 -0
- lfx/components/tools/tavily_search_tool.py +345 -0
- lfx/components/tools/wikidata_api.py +103 -0
- lfx/components/tools/wikipedia_api.py +50 -0
- lfx/components/tools/yahoo_finance.py +130 -0
- lfx/components/twelvelabs/__init__.py +52 -0
- lfx/components/twelvelabs/convert_astra_results.py +84 -0
- lfx/components/twelvelabs/pegasus_index.py +311 -0
- lfx/components/twelvelabs/split_video.py +301 -0
- lfx/components/twelvelabs/text_embeddings.py +57 -0
- lfx/components/twelvelabs/twelvelabs_pegasus.py +408 -0
- lfx/components/twelvelabs/video_embeddings.py +100 -0
- lfx/components/twelvelabs/video_file.py +191 -0
- lfx/components/unstructured/__init__.py +3 -0
- lfx/components/unstructured/unstructured.py +121 -0
- lfx/components/upstash/__init__.py +34 -0
- lfx/components/upstash/upstash.py +124 -0
- lfx/components/utilities/__init__.py +43 -0
- lfx/components/utilities/calculator_core.py +89 -0
- lfx/components/utilities/current_date.py +42 -0
- lfx/components/utilities/id_generator.py +42 -0
- lfx/components/utilities/python_repl_core.py +98 -0
- lfx/components/vectara/__init__.py +37 -0
- lfx/components/vectara/vectara.py +97 -0
- lfx/components/vectara/vectara_rag.py +164 -0
- lfx/components/vectorstores/__init__.py +34 -0
- lfx/components/vectorstores/local_db.py +270 -0
- lfx/components/vertexai/__init__.py +37 -0
- lfx/components/vertexai/vertexai.py +71 -0
- lfx/components/vertexai/vertexai_embeddings.py +67 -0
- lfx/components/vlmrun/__init__.py +34 -0
- lfx/components/vlmrun/vlmrun_transcription.py +224 -0
- lfx/components/weaviate/__init__.py +34 -0
- lfx/components/weaviate/weaviate.py +89 -0
- lfx/components/wikipedia/__init__.py +4 -0
- lfx/components/wikipedia/wikidata.py +86 -0
- lfx/components/wikipedia/wikipedia.py +53 -0
- lfx/components/wolframalpha/__init__.py +3 -0
- lfx/components/wolframalpha/wolfram_alpha_api.py +54 -0
- lfx/components/xai/__init__.py +32 -0
- lfx/components/xai/xai.py +167 -0
- lfx/components/yahoosearch/__init__.py +3 -0
- lfx/components/yahoosearch/yahoo.py +137 -0
- lfx/components/youtube/__init__.py +52 -0
- lfx/components/youtube/channel.py +227 -0
- lfx/components/youtube/comments.py +231 -0
- lfx/components/youtube/playlist.py +33 -0
- lfx/components/youtube/search.py +120 -0
- lfx/components/youtube/trending.py +285 -0
- lfx/components/youtube/video_details.py +263 -0
- lfx/components/youtube/youtube_transcripts.py +206 -0
- lfx/components/zep/__init__.py +3 -0
- lfx/components/zep/zep.py +45 -0
- lfx/constants.py +6 -0
- lfx/custom/__init__.py +7 -0
- lfx/custom/attributes.py +87 -0
- lfx/custom/code_parser/__init__.py +3 -0
- lfx/custom/code_parser/code_parser.py +361 -0
- lfx/custom/custom_component/__init__.py +0 -0
- lfx/custom/custom_component/base_component.py +128 -0
- lfx/custom/custom_component/component.py +1890 -0
- lfx/custom/custom_component/component_with_cache.py +8 -0
- lfx/custom/custom_component/custom_component.py +650 -0
- lfx/custom/dependency_analyzer.py +165 -0
- lfx/custom/directory_reader/__init__.py +3 -0
- lfx/custom/directory_reader/directory_reader.py +359 -0
- lfx/custom/directory_reader/utils.py +171 -0
- lfx/custom/eval.py +12 -0
- lfx/custom/schema.py +32 -0
- lfx/custom/tree_visitor.py +21 -0
- lfx/custom/utils.py +877 -0
- lfx/custom/validate.py +523 -0
- lfx/events/__init__.py +1 -0
- lfx/events/event_manager.py +110 -0
- lfx/exceptions/__init__.py +0 -0
- lfx/exceptions/component.py +15 -0
- lfx/field_typing/__init__.py +91 -0
- lfx/field_typing/constants.py +216 -0
- lfx/field_typing/range_spec.py +35 -0
- lfx/graph/__init__.py +6 -0
- lfx/graph/edge/__init__.py +0 -0
- lfx/graph/edge/base.py +300 -0
- lfx/graph/edge/schema.py +119 -0
- lfx/graph/edge/utils.py +0 -0
- lfx/graph/graph/__init__.py +0 -0
- lfx/graph/graph/ascii.py +202 -0
- lfx/graph/graph/base.py +2298 -0
- lfx/graph/graph/constants.py +63 -0
- lfx/graph/graph/runnable_vertices_manager.py +133 -0
- lfx/graph/graph/schema.py +53 -0
- lfx/graph/graph/state_model.py +66 -0
- lfx/graph/graph/utils.py +1024 -0
- lfx/graph/schema.py +75 -0
- lfx/graph/state/__init__.py +0 -0
- lfx/graph/state/model.py +250 -0
- lfx/graph/utils.py +206 -0
- lfx/graph/vertex/__init__.py +0 -0
- lfx/graph/vertex/base.py +826 -0
- lfx/graph/vertex/constants.py +0 -0
- lfx/graph/vertex/exceptions.py +4 -0
- lfx/graph/vertex/param_handler.py +316 -0
- lfx/graph/vertex/schema.py +26 -0
- lfx/graph/vertex/utils.py +19 -0
- lfx/graph/vertex/vertex_types.py +489 -0
- lfx/helpers/__init__.py +141 -0
- lfx/helpers/base_model.py +71 -0
- lfx/helpers/custom.py +13 -0
- lfx/helpers/data.py +167 -0
- lfx/helpers/flow.py +308 -0
- lfx/inputs/__init__.py +68 -0
- lfx/inputs/constants.py +2 -0
- lfx/inputs/input_mixin.py +352 -0
- lfx/inputs/inputs.py +718 -0
- lfx/inputs/validators.py +19 -0
- lfx/interface/__init__.py +6 -0
- lfx/interface/components.py +897 -0
- lfx/interface/importing/__init__.py +5 -0
- lfx/interface/importing/utils.py +39 -0
- lfx/interface/initialize/__init__.py +3 -0
- lfx/interface/initialize/loading.py +317 -0
- lfx/interface/listing.py +26 -0
- lfx/interface/run.py +16 -0
- lfx/interface/utils.py +111 -0
- lfx/io/__init__.py +63 -0
- lfx/io/schema.py +295 -0
- lfx/load/__init__.py +8 -0
- lfx/load/load.py +256 -0
- lfx/load/utils.py +99 -0
- lfx/log/__init__.py +5 -0
- lfx/log/logger.py +411 -0
- lfx/logging/__init__.py +11 -0
- lfx/logging/logger.py +24 -0
- lfx/memory/__init__.py +70 -0
- lfx/memory/stubs.py +302 -0
- lfx/processing/__init__.py +1 -0
- lfx/processing/process.py +238 -0
- lfx/processing/utils.py +25 -0
- lfx/py.typed +0 -0
- lfx/schema/__init__.py +66 -0
- lfx/schema/artifact.py +83 -0
- lfx/schema/content_block.py +62 -0
- lfx/schema/content_types.py +91 -0
- lfx/schema/cross_module.py +80 -0
- lfx/schema/data.py +309 -0
- lfx/schema/dataframe.py +210 -0
- lfx/schema/dotdict.py +74 -0
- lfx/schema/encoders.py +13 -0
- lfx/schema/graph.py +47 -0
- lfx/schema/image.py +184 -0
- lfx/schema/json_schema.py +186 -0
- lfx/schema/log.py +62 -0
- lfx/schema/message.py +493 -0
- lfx/schema/openai_responses_schemas.py +74 -0
- lfx/schema/properties.py +41 -0
- lfx/schema/schema.py +180 -0
- lfx/schema/serialize.py +13 -0
- lfx/schema/table.py +142 -0
- lfx/schema/validators.py +114 -0
- lfx/serialization/__init__.py +5 -0
- lfx/serialization/constants.py +2 -0
- lfx/serialization/serialization.py +314 -0
- lfx/services/__init__.py +26 -0
- lfx/services/base.py +28 -0
- lfx/services/cache/__init__.py +6 -0
- lfx/services/cache/base.py +183 -0
- lfx/services/cache/service.py +166 -0
- lfx/services/cache/utils.py +169 -0
- lfx/services/chat/__init__.py +1 -0
- lfx/services/chat/config.py +2 -0
- lfx/services/chat/schema.py +10 -0
- lfx/services/database/__init__.py +5 -0
- lfx/services/database/service.py +25 -0
- lfx/services/deps.py +194 -0
- lfx/services/factory.py +19 -0
- lfx/services/initialize.py +19 -0
- lfx/services/interfaces.py +103 -0
- lfx/services/manager.py +185 -0
- lfx/services/mcp_composer/__init__.py +6 -0
- lfx/services/mcp_composer/factory.py +16 -0
- lfx/services/mcp_composer/service.py +1441 -0
- lfx/services/schema.py +21 -0
- lfx/services/session.py +87 -0
- lfx/services/settings/__init__.py +3 -0
- lfx/services/settings/auth.py +133 -0
- lfx/services/settings/base.py +668 -0
- lfx/services/settings/constants.py +43 -0
- lfx/services/settings/factory.py +23 -0
- lfx/services/settings/feature_flags.py +11 -0
- lfx/services/settings/service.py +35 -0
- lfx/services/settings/utils.py +40 -0
- lfx/services/shared_component_cache/__init__.py +1 -0
- lfx/services/shared_component_cache/factory.py +30 -0
- lfx/services/shared_component_cache/service.py +9 -0
- lfx/services/storage/__init__.py +5 -0
- lfx/services/storage/local.py +185 -0
- lfx/services/storage/service.py +177 -0
- lfx/services/tracing/__init__.py +1 -0
- lfx/services/tracing/service.py +21 -0
- lfx/settings.py +6 -0
- lfx/template/__init__.py +6 -0
- lfx/template/field/__init__.py +0 -0
- lfx/template/field/base.py +260 -0
- lfx/template/field/prompt.py +15 -0
- lfx/template/frontend_node/__init__.py +6 -0
- lfx/template/frontend_node/base.py +214 -0
- lfx/template/frontend_node/constants.py +65 -0
- lfx/template/frontend_node/custom_components.py +79 -0
- lfx/template/template/__init__.py +0 -0
- lfx/template/template/base.py +100 -0
- lfx/template/utils.py +217 -0
- lfx/type_extraction/__init__.py +19 -0
- lfx/type_extraction/type_extraction.py +75 -0
- lfx/type_extraction.py +80 -0
- lfx/utils/__init__.py +1 -0
- lfx/utils/async_helpers.py +42 -0
- lfx/utils/component_utils.py +154 -0
- lfx/utils/concurrency.py +60 -0
- lfx/utils/connection_string_parser.py +11 -0
- lfx/utils/constants.py +233 -0
- lfx/utils/data_structure.py +212 -0
- lfx/utils/exceptions.py +22 -0
- lfx/utils/helpers.py +34 -0
- lfx/utils/image.py +79 -0
- lfx/utils/langflow_utils.py +52 -0
- lfx/utils/lazy_load.py +15 -0
- lfx/utils/request_utils.py +18 -0
- lfx/utils/schemas.py +139 -0
- lfx/utils/ssrf_protection.py +384 -0
- lfx/utils/util.py +626 -0
- lfx/utils/util_strings.py +56 -0
- lfx/utils/validate_cloud.py +26 -0
- lfx/utils/version.py +24 -0
- lfx_nightly-0.2.0.dev25.dist-info/METADATA +312 -0
- lfx_nightly-0.2.0.dev25.dist-info/RECORD +769 -0
- lfx_nightly-0.2.0.dev25.dist-info/WHEEL +4 -0
- lfx_nightly-0.2.0.dev25.dist-info/entry_points.txt +2 -0
lfx/base/composio/composio_base.py

@@ -0,0 +1,2584 @@
import copy
import json
import re
from contextlib import suppress
from typing import Any

from composio import Composio
from composio_langchain import LangchainProvider
from langchain_core.tools import Tool

from lfx.base.mcp.util import create_input_schema_from_json_schema
from lfx.custom.custom_component.component import Component
from lfx.inputs.inputs import (
    AuthInput,
    DropdownInput,
    FileInput,
    InputTypes,
    MessageTextInput,
    MultilineInput,
    SecretStrInput,
    SortableListInput,
    StrInput,
    TabInput,
)
from lfx.io import Output
from lfx.io.schema import flatten_schema, schema_to_langflow_inputs
from lfx.log.logger import logger
from lfx.schema.data import Data
from lfx.schema.dataframe import DataFrame
from lfx.schema.message import Message
from lfx.utils.validate_cloud import raise_error_if_astra_cloud_disable_component

disable_component_in_astra_cloud_msg = (
    "Composio tools are not supported in Astra cloud environment. "
    "Please use local storage mode or cloud-based versions of the tools."
)


class ComposioBaseComponent(Component):
    """Base class for Composio components with common functionality."""

    default_tools_limit: int = 5

    _base_inputs = [
        MessageTextInput(
            name="entity_id",
            display_name="Entity ID",
            value="default",
            advanced=True,
            tool_mode=True,
        ),
        SecretStrInput(
            name="api_key",
            display_name="Composio API Key",
            required=True,
            real_time_refresh=True,
            value="COMPOSIO_API_KEY",
        ),
        DropdownInput(
            name="auth_mode",
            display_name="Auth Mode",
            options=[],
            placeholder="Select auth mode",
            toggle=True,
            toggle_disable=True,
            show=False,
            real_time_refresh=True,
            helper_text="Choose how to authenticate with the toolkit.",
        ),
        AuthInput(
            name="auth_link",
            value="",
            auth_tooltip="Please insert a valid Composio API Key.",
            show=False,
        ),
        # Pre-defined placeholder fields for dynamic auth - hidden by default
        SecretStrInput(
            name="client_id",
            display_name="Client ID",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        SecretStrInput(
            name="client_secret",
            display_name="Client Secret",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        StrInput(
            name="verification_token",
            display_name="Verification Token",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        StrInput(
            name="redirect_uri",
            display_name="Redirect URI",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        StrInput(
            name="authorization_url",
            display_name="Authorization URL",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        StrInput(
            name="token_url",
            display_name="Token URL",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        # API Key auth fields
        SecretStrInput(
            name="api_key_field",
            display_name="API Key",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        SecretStrInput(
            name="generic_api_key",
            display_name="API Key",
            info="Enter API key on Composio page",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        SecretStrInput(
            name="token",
            display_name="Token",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        SecretStrInput(
            name="access_token",
            display_name="Access Token",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        SecretStrInput(
            name="refresh_token",
            display_name="Refresh Token",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        # Basic Auth fields
        StrInput(
            name="username",
            display_name="Username",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        SecretStrInput(
            name="password",
            display_name="Password",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        # Other common auth fields
        StrInput(
            name="domain",
            display_name="Domain",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        StrInput(
            name="base_url",
            display_name="Base URL",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        SecretStrInput(
            name="bearer_token",
            display_name="Bearer Token",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        SecretStrInput(
            name="authorization_code",
            display_name="Authorization Code",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        StrInput(
            name="scopes",
            display_name="Scopes",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        # Add more common auth fields
        StrInput(
            name="subdomain",
            display_name="Subdomain",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        StrInput(
            name="instance_url",
            display_name="Instance URL",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        StrInput(
            name="tenant_id",
            display_name="Tenant ID",
            info="",
            show=False,
            value="",
            required=False,
            real_time_refresh=True,
        ),
        SortableListInput(
            name="action_button",
            display_name="Action",
            placeholder="Select action",
            options=[],
            value="disabled",
            helper_text="Please connect before selecting actions.",
            helper_text_metadata={"variant": "destructive"},
            show=True,
            required=False,
            real_time_refresh=True,
            limit=1,
        ),
    ]

    _name_sanitizer = re.compile(r"[^a-zA-Z0-9_-]")

    # Class-level caches
    _actions_cache: dict[str, dict[str, Any]] = {}
    _action_schema_cache: dict[str, dict[str, Any]] = {}
    # Track all auth field names discovered across all toolkits
    _all_auth_field_names: set[str] = set()

    @classmethod
    def get_actions_cache(cls) -> dict[str, dict[str, Any]]:
        """Get the class-level actions cache."""
        return cls._actions_cache

    @classmethod
    def get_action_schema_cache(cls) -> dict[str, dict[str, Any]]:
        """Get the class-level action schema cache."""
        return cls._action_schema_cache

    @classmethod
    def get_all_auth_field_names(cls) -> set[str]:
        """Get all auth field names discovered across toolkits."""
        return cls._all_auth_field_names

    outputs = [
        Output(name="dataFrame", display_name="DataFrame", method="as_dataframe"),
    ]

    inputs = list(_base_inputs)

    def __init__(self, **kwargs):
        """Initialize instance variables to prevent shared state between components."""
        super().__init__(**kwargs)
        self._all_fields: set[str] = set()
        self._bool_variables: set[str] = set()
        self._actions_data: dict[str, dict[str, Any]] = {}
        self._default_tools: set[str] = set()
        self._display_to_key_map: dict[str, str] = {}
        self._key_to_display_map: dict[str, str] = {}
        self._sanitized_names: dict[str, str] = {}
        self._action_schemas: dict[str, Any] = {}
        # Toolkit schema cache per instance
        self._toolkit_schema: dict[str, Any] | None = None
        # Track generated custom auth inputs to hide/show/reset
        self._auth_dynamic_fields: set[str] = set()

    def as_message(self) -> Message:
        result = self.execute_action()
        if result is None:
            return Message(text="Action execution returned no result")
        return Message(text=str(result))

    def as_dataframe(self) -> DataFrame:
        # Check if we're in Astra cloud environment and raise an error if we are.
        raise_error_if_astra_cloud_disable_component(disable_component_in_astra_cloud_msg)
        result = self.execute_action()

        if isinstance(result, dict):
            result = [result]
        # Build DataFrame and avoid exposing a 'data' attribute via column access,
        # which interferes with logging utilities that probe for '.data'.
        df = DataFrame(result)
        if "data" in df.columns:
            df = df.rename(columns={"data": "_data"})
        return df

    def as_data(self) -> Data:
        result = self.execute_action()
        return Data(results=result)

    def _build_action_maps(self):
        """Build lookup maps for action names."""
        if not self._display_to_key_map or not self._key_to_display_map:
            self._display_to_key_map = {data["display_name"]: key for key, data in self._actions_data.items()}
            self._key_to_display_map = {key: data["display_name"] for key, data in self._actions_data.items()}
            self._sanitized_names = {
                action: self._name_sanitizer.sub("-", self.sanitize_action_name(action))
                for action in self._actions_data
            }

    def sanitize_action_name(self, action_name: str) -> str:
        """Convert action name to display name using lookup."""
        self._build_action_maps()
        return self._key_to_display_map.get(action_name, action_name)

    def desanitize_action_name(self, action_name: str) -> str:
        """Convert display name to action key using lookup."""
        self._build_action_maps()
        return self._display_to_key_map.get(action_name, action_name)

    def _get_action_fields(self, action_key: str | None) -> set[str]:
        """Get fields for an action."""
        if action_key is None:
            return set()
        return set(self._actions_data[action_key]["action_fields"]) if action_key in self._actions_data else set()

    def _build_wrapper(self) -> Composio:
        """Build the Composio wrapper."""
        # Check if we're in Astra cloud environment and raise an error if we are.
        raise_error_if_astra_cloud_disable_component(disable_component_in_astra_cloud_msg)
        try:
            if not self.api_key:
                msg = "Composio API Key is required"
                raise ValueError(msg)
            return Composio(api_key=self.api_key, provider=LangchainProvider())

        except ValueError as e:
            logger.error(f"Error building Composio wrapper: {e}")
            msg = "Please provide a valid Composio API Key in the component settings"
            raise ValueError(msg) from e

    def show_hide_fields(self, build_config: dict, field_value: Any):
        """Optimized field visibility updates by only modifying show values."""
        if not field_value:
            for field in self._all_fields:
                build_config[field]["show"] = False
                if field in self._bool_variables:
                    build_config[field]["value"] = False
                else:
                    build_config[field]["value"] = ""
            return

        action_key = None
        if isinstance(field_value, list) and field_value:
            action_key = self.desanitize_action_name(field_value[0]["name"])
        else:
            action_key = field_value

        fields_to_show = self._get_action_fields(action_key)

        for field in self._all_fields:
            should_show = field in fields_to_show
            if build_config[field]["show"] != should_show:
                build_config[field]["show"] = should_show
            if not should_show:
                if field in self._bool_variables:
                    build_config[field]["value"] = False
                else:
                    build_config[field]["value"] = ""

    def _populate_actions_data(self):
        """Fetch the list of actions for the toolkit and build helper maps."""
        if self._actions_data:
            return

        # Try to load from the class-level cache
        toolkit_slug = self.app_name.lower()
        if toolkit_slug in self.__class__.get_actions_cache():
            # Deep-copy so that any mutation on this instance does not affect the
            # cached master copy.
            self._actions_data = copy.deepcopy(self.__class__.get_actions_cache()[toolkit_slug])
            self._action_schemas = copy.deepcopy(self.__class__.get_action_schema_cache().get(toolkit_slug, {}))
            logger.debug(f"Loaded actions for {toolkit_slug} from in-process cache")
            return

        api_key = getattr(self, "api_key", None)
        if not api_key:
            logger.warning("API key is missing. Cannot populate actions data.")
            return

        try:
            composio = self._build_wrapper()
            toolkit_slug = self.app_name.lower()

            raw_tools = composio.tools.get_raw_composio_tools(toolkits=[toolkit_slug], limit=999)

            if not raw_tools:
                msg = f"Toolkit '{toolkit_slug}' not found or has no available tools"
                raise ValueError(msg)

            for raw_tool in raw_tools:
                try:
                    # Convert raw_tool to dict-like structure
                    tool_dict = raw_tool.__dict__ if hasattr(raw_tool, "__dict__") else raw_tool

                    if not tool_dict:
                        logger.warning(f"Tool is None or empty: {raw_tool}")
                        continue

                    action_key = tool_dict.get("slug")
                    if not action_key:
                        logger.warning(f"Action key (slug) is missing in tool: {tool_dict}")
                        continue

                    # Human-friendly display name
                    display_name = tool_dict.get("name") or tool_dict.get("display_name")
                    if not display_name:
                        # Better fallback: convert GMAIL_SEND_EMAIL to "Send Email"
                        # Remove app prefix and convert to title case
                        clean_name = action_key
                        clean_name = clean_name.removeprefix(f"{self.app_name.upper()}_")
                        # Convert underscores to spaces and title case
                        display_name = clean_name.replace("_", " ").title()

                    # Build list of parameter names and track bool fields
                    parameters_schema = tool_dict.get("input_parameters", {})
                    if parameters_schema is None:
                        logger.warning(f"Parameters schema is None for action key: {action_key}")
                        # Still add the action but with empty fields
                        # Extract version information from the tool
                        version = tool_dict.get("version")
                        available_versions = tool_dict.get("available_versions", [])

                        self._action_schemas[action_key] = tool_dict
                        self._actions_data[action_key] = {
                            "display_name": display_name,
                            "action_fields": [],
                            "file_upload_fields": set(),
                            "version": version,
                            "available_versions": available_versions,
                        }
                        continue

                    try:
                        # Special handling for unusual schema structures
                        if not isinstance(parameters_schema, dict):
                            # Try to convert if it's a model object
                            if hasattr(parameters_schema, "model_dump"):
                                parameters_schema = parameters_schema.model_dump()
                            elif hasattr(parameters_schema, "__dict__"):
                                parameters_schema = parameters_schema.__dict__
                            else:
                                logger.warning(f"Cannot process parameters schema for {action_key}, skipping")
                                # Extract version information from the tool
                                version = tool_dict.get("version")
                                available_versions = tool_dict.get("available_versions", [])

                                self._action_schemas[action_key] = tool_dict
                                self._actions_data[action_key] = {
                                    "display_name": display_name,
                                    "action_fields": [],
                                    "file_upload_fields": set(),
                                    "version": version,
                                    "available_versions": available_versions,
                                }
                                continue

                        # Validate parameters_schema has required structure before flattening
                        if not parameters_schema.get("properties") and not parameters_schema.get("$defs"):
                            # Create a minimal valid schema to avoid errors
                            parameters_schema = {"type": "object", "properties": {}}

                        # Sanitize the schema before passing to flatten_schema
                        # Handle case where 'required' is explicitly None (causes "'NoneType' object is not iterable")
                        if parameters_schema.get("required") is None:
                            parameters_schema = parameters_schema.copy()  # Don't modify the original
                            parameters_schema["required"] = []

                        try:
                            # Preserve original descriptions before flattening to restore if lost
                            original_descriptions = {}
                            original_props = parameters_schema.get("properties", {})
                            for prop_name, prop_schema in original_props.items():
                                if isinstance(prop_schema, dict) and "description" in prop_schema:
                                    original_descriptions[prop_name] = prop_schema["description"]

                            flat_schema = flatten_schema(parameters_schema)

                            # Restore lost descriptions in flattened schema
                            if flat_schema and isinstance(flat_schema, dict) and "properties" in flat_schema:
                                flat_props = flat_schema["properties"]
                                for field_name, field_schema in flat_props.items():
                                    # Check if this field lost its description during flattening
                                    if isinstance(field_schema, dict) and "description" not in field_schema:
                                        # Try to find the original description
                                        # Handle array fields like bcc[0] -> bcc
                                        base_field_name = field_name.replace("[0]", "")
                                        if base_field_name in original_descriptions:
                                            field_schema["description"] = original_descriptions[base_field_name]
                                        elif field_name in original_descriptions:
                                            field_schema["description"] = original_descriptions[field_name]
                        except (KeyError, TypeError, ValueError):
                            # Extract version information from the tool
                            version = tool_dict.get("version")
                            available_versions = tool_dict.get("available_versions", [])

                            self._action_schemas[action_key] = tool_dict
                            self._actions_data[action_key] = {
                                "display_name": display_name,
                                "action_fields": [],
                                "file_upload_fields": set(),
                                "version": version,
                                "available_versions": available_versions,
                            }
                            continue

                        if flat_schema is None:
                            logger.warning(f"Flat schema is None for action key: {action_key}")
                            # Still add the action but with empty fields so the UI doesn't break
                            # Extract version information from the tool
                            version = tool_dict.get("version")
                            available_versions = tool_dict.get("available_versions", [])

                            self._action_schemas[action_key] = tool_dict
                            self._actions_data[action_key] = {
                                "display_name": display_name,
                                "action_fields": [],
                                "file_upload_fields": set(),
                                "version": version,
                                "available_versions": available_versions,
                            }
                            continue

                        # Extract field names and detect file upload fields during parsing
                        raw_action_fields = list(flat_schema.get("properties", {}).keys())
                        action_fields = []
                        attachment_related_found = False
                        file_upload_fields = set()

                        # Check original schema properties for file_uploadable fields
                        original_props = parameters_schema.get("properties", {})

                        # Determine top-level fields that should be treated as single JSON inputs
                        json_parent_fields = set()
                        for top_name, top_schema in original_props.items():
                            if isinstance(top_schema, dict) and top_schema.get("type") in {"object", "array"}:
                                json_parent_fields.add(top_name)

                        for field_name, field_schema in original_props.items():
                            if isinstance(field_schema, dict):
                                clean_field_name = field_name.replace("[0]", "")
                                # Check direct file_uploadable attribute
                                if field_schema.get("file_uploadable") is True:
                                    file_upload_fields.add(clean_field_name)

                                # Check anyOf structures (like OUTLOOK_OUTLOOK_SEND_EMAIL)
                                if "anyOf" in field_schema:
                                    for any_of_item in field_schema["anyOf"]:
                                        if isinstance(any_of_item, dict) and any_of_item.get("file_uploadable") is True:
                                            file_upload_fields.add(clean_field_name)

                        for field in raw_action_fields:
                            clean_field = field.replace("[0]", "")
                            # Skip subfields of JSON parents; we will expose the parent as a single field
                            top_prefix = clean_field.split(".")[0].split("[")[0]
                            if top_prefix in json_parent_fields and "." in clean_field:
                                continue
                            # Check if this field is attachment-related
                            if clean_field.lower().startswith("attachment."):
|
|
623
|
+
attachment_related_found = True
|
|
624
|
+
continue # Skip individual attachment fields
|
|
625
|
+
|
|
626
|
+
# Handle conflicting field names - rename user_id to avoid conflicts with entity_id
|
|
627
|
+
if clean_field == "user_id":
|
|
628
|
+
clean_field = f"{self.app_name}_user_id"
|
|
629
|
+
|
|
630
|
+
# Handle reserved attribute name conflicts (e.g., 'status', 'name')
|
|
631
|
+
# Prefix with app name to prevent clashes with component attributes
|
|
632
|
+
if clean_field in {"status", "name"}:
|
|
633
|
+
clean_field = f"{self.app_name}_{clean_field}"
|
|
634
|
+
|
|
635
|
+
action_fields.append(clean_field)
|
|
636
|
+
|
|
637
|
+
# Add consolidated attachment field if we found attachment-related fields
|
|
638
|
+
if attachment_related_found:
|
|
639
|
+
action_fields.append("attachment")
|
|
640
|
+
file_upload_fields.add("attachment") # Attachment fields are also file upload fields
|
|
641
|
+
|
|
642
|
+
# Ensure parents for object/array are present as fields (single JSON field)
|
|
643
|
+
for parent in json_parent_fields:
|
|
644
|
+
if parent not in action_fields:
|
|
645
|
+
action_fields.append(parent)
|
|
646
|
+
|
|
647
|
+
# Track boolean parameters so we can coerce them later
|
|
648
|
+
properties = flat_schema.get("properties", {})
|
|
649
|
+
if properties:
|
|
650
|
+
for p_name, p_schema in properties.items():
|
|
651
|
+
if isinstance(p_schema, dict) and p_schema.get("type") == "boolean":
|
|
652
|
+
# Use cleaned field name for boolean tracking
|
|
653
|
+
clean_field_name = p_name.replace("[0]", "")
|
|
654
|
+
self._bool_variables.add(clean_field_name)
|
|
655
|
+
|
|
656
|
+
# Extract version information from the tool
|
|
657
|
+
version = tool_dict.get("version")
|
|
658
|
+
available_versions = tool_dict.get("available_versions", [])
|
|
659
|
+
|
|
660
|
+
self._action_schemas[action_key] = tool_dict
|
|
661
|
+
self._actions_data[action_key] = {
|
|
662
|
+
"display_name": display_name,
|
|
663
|
+
"action_fields": action_fields,
|
|
664
|
+
"file_upload_fields": file_upload_fields,
|
|
665
|
+
"version": version,
|
|
666
|
+
"available_versions": available_versions,
|
|
667
|
+
}
|
|
668
|
+
|
|
669
|
+
except (KeyError, TypeError, ValueError) as flatten_error:
|
|
670
|
+
logger.error(f"flatten_schema failed for {action_key}: {flatten_error}")
|
|
671
|
+
# Extract version information from the tool
|
|
672
|
+
version = tool_dict.get("version")
|
|
673
|
+
available_versions = tool_dict.get("available_versions", [])
|
|
674
|
+
|
|
675
|
+
self._action_schemas[action_key] = tool_dict
|
|
676
|
+
self._actions_data[action_key] = {
|
|
677
|
+
"display_name": display_name,
|
|
678
|
+
"action_fields": [],
|
|
679
|
+
"file_upload_fields": set(),
|
|
680
|
+
"version": version,
|
|
681
|
+
"available_versions": available_versions,
|
|
682
|
+
}
|
|
683
|
+
continue
|
|
684
|
+
|
|
685
|
+
except ValueError as e:
|
|
686
|
+
logger.warning(f"Failed processing Composio tool for action {raw_tool}: {e}")
|
|
687
|
+
|
|
688
|
+
# Helper look-ups used elsewhere
|
|
689
|
+
self._all_fields = {f for d in self._actions_data.values() for f in d["action_fields"]}
|
|
690
|
+
self._build_action_maps()
|
|
691
|
+
|
|
692
|
+
# Cache actions for this toolkit so subsequent component instances
|
|
693
|
+
# can reuse them without hitting the Composio API again.
|
|
694
|
+
self.__class__.get_actions_cache()[toolkit_slug] = copy.deepcopy(self._actions_data)
|
|
695
|
+
self.__class__.get_action_schema_cache()[toolkit_slug] = copy.deepcopy(self._action_schemas)
|
|
696
|
+
|
|
697
|
+
except ValueError as e:
|
|
698
|
+
logger.debug(f"Could not populate Composio actions for {self.app_name}: {e}")
|
|
699
|
+
|
|
700
|
+
    def _validate_schema_inputs(self, action_key: str) -> list[InputTypes]:
        """Convert the JSON schema for *action_key* into Langflow input objects."""
        # Skip validation for default/placeholder values
        if action_key in ("disabled", "placeholder", ""):
            logger.debug(f"Skipping schema validation for placeholder value: {action_key}")
            return []

        schema_dict = self._action_schemas.get(action_key)
        if not schema_dict:
            logger.warning(f"No schema found for action key: {action_key}")
            return []

        try:
            parameters_schema = schema_dict.get("input_parameters", {})
            if parameters_schema is None:
                logger.warning(f"Parameters schema is None for action key: {action_key}")
                return []

            # Check if parameters_schema has the expected structure
            if not isinstance(parameters_schema, dict):
                logger.warning(
                    f"Parameters schema is not a dict for action key: {action_key}, got: {type(parameters_schema)}"
                )
                return []

            # Validate parameters_schema has required structure before flattening
            if not parameters_schema.get("properties") and not parameters_schema.get("$defs"):
                # Create a minimal valid schema to avoid errors
                parameters_schema = {"type": "object", "properties": {}}

            # Sanitize the schema before passing to flatten_schema
            # Handle case where 'required' is explicitly None (causes "'NoneType' object is not iterable")
            if parameters_schema.get("required") is None:
                parameters_schema = parameters_schema.copy()  # Don't modify the original
                parameters_schema["required"] = []

            # Also get top-level required fields from original schema
            original_required = set(parameters_schema.get("required", []))

            try:
                # Preserve original descriptions before flattening to restore if lost
                original_descriptions = {}
                original_props = parameters_schema.get("properties", {})
                for prop_name, prop_schema in original_props.items():
                    if isinstance(prop_schema, dict) and "description" in prop_schema:
                        original_descriptions[prop_name] = prop_schema["description"]

                flat_schema = flatten_schema(parameters_schema)

                # Restore lost descriptions in flattened schema
                if flat_schema and isinstance(flat_schema, dict) and "properties" in flat_schema:
                    flat_props = flat_schema["properties"]
                    for field_name, field_schema in flat_props.items():
                        # Check if this field lost its description during flattening
                        if isinstance(field_schema, dict) and "description" not in field_schema:
                            # Try to find the original description
                            # Handle array fields like bcc[0] -> bcc
                            base_field_name = field_name.replace("[0]", "")
                            if base_field_name in original_descriptions:
                                field_schema["description"] = original_descriptions[base_field_name]
                            elif field_name in original_descriptions:
                                field_schema["description"] = original_descriptions[field_name]
            except (KeyError, TypeError, ValueError) as flatten_error:
                logger.error(f"flatten_schema failed for {action_key}: {flatten_error}")
                return []

            if flat_schema is None:
                logger.warning(f"Flat schema is None for action key: {action_key}")
                return []

            # Additional check for flat_schema structure
            if not isinstance(flat_schema, dict):
                logger.warning(f"Flat schema is not a dict for action key: {action_key}, got: {type(flat_schema)}")
                return []

            # Ensure flat_schema has the expected structure for create_input_schema_from_json_schema
            if flat_schema.get("type") != "object":
                logger.warning(f"Flat schema for {action_key} is not of type 'object', got: {flat_schema.get('type')}")
                # Fix the schema type if it's missing
                flat_schema["type"] = "object"

            if "properties" not in flat_schema:
                flat_schema["properties"] = {}

            # Clean up field names - remove [0] suffixes from array fields
            cleaned_properties = {}
            attachment_related_fields = set()  # Track fields that are attachment-related

            for field_name, field_schema in flat_schema.get("properties", {}).items():
                # Remove [0] suffix from field names (e.g., "bcc[0]" -> "bcc", "cc[0]" -> "cc")
                clean_field_name = field_name.replace("[0]", "")

                # Check if this field is attachment-related (contains "attachment." prefix)
                if clean_field_name.lower().startswith("attachment."):
                    attachment_related_fields.add(clean_field_name)
                    # Don't add individual attachment sub-fields to the schema
                    continue

                # Handle conflicting field names - rename user_id to avoid conflicts with entity_id
                if clean_field_name == "user_id":
                    clean_field_name = f"{self.app_name}_user_id"
                    # Update the field schema description to reflect the name change
                    field_schema_copy = field_schema.copy()
                    field_schema_copy["description"] = (
                        f"User ID for {self.app_name.title()}: " + field_schema["description"]
                    )
                elif clean_field_name == "status":
                    clean_field_name = f"{self.app_name}_status"
                    # Update the field schema description to reflect the name change
                    field_schema_copy = field_schema.copy()
                    field_schema_copy["description"] = f"Status for {self.app_name.title()}: " + field_schema.get(
                        "description", ""
                    )
                elif clean_field_name == "name":
                    clean_field_name = f"{self.app_name}_name"
                    # Update the field schema description to reflect the name change
                    field_schema_copy = field_schema.copy()
                    field_schema_copy["description"] = f"Name for {self.app_name.title()}: " + field_schema.get(
                        "description", ""
                    )
                else:
                    # Use the original field schema for all other fields
                    field_schema_copy = field_schema

                # Preserve the full schema information, not just the type
                cleaned_properties[clean_field_name] = field_schema_copy

            # If we found attachment-related fields, add a single "attachment" field
            if attachment_related_fields:
                # Create a generic attachment field schema
                attachment_schema = {
                    "type": "string",
                    "description": "File attachment for the email",
                    "title": "Attachment",
                }
                cleaned_properties["attachment"] = attachment_schema

            # Update the flat schema with cleaned field names
            flat_schema["properties"] = cleaned_properties

            # Also update required fields to match cleaned names
            if flat_schema.get("required"):
                cleaned_required = []
                for field in flat_schema["required"]:
                    base = field.replace("[0]", "")
                    if base == "user_id":
                        cleaned_required.append(f"{self.app_name}_user_id")
                    elif base == "status":
                        cleaned_required.append(f"{self.app_name}_status")
                    elif base == "name":
                        cleaned_required.append(f"{self.app_name}_name")
                    else:
                        cleaned_required.append(base)
                flat_schema["required"] = cleaned_required

            input_schema = create_input_schema_from_json_schema(flat_schema)
            if input_schema is None:
                logger.warning(f"Input schema is None for action key: {action_key}")
                return []

            # Additional safety check before calling schema_to_langflow_inputs
            if not hasattr(input_schema, "model_fields"):
                logger.warning(f"Input schema for {action_key} does not have model_fields attribute")
                return []

            if input_schema.model_fields is None:
                logger.warning(f"Input schema model_fields is None for {action_key}")
                return []

            result = schema_to_langflow_inputs(input_schema)

            # Process inputs to handle attachment fields and set advanced status
            if result:
                processed_inputs = []
                required_fields_set = set(flat_schema.get("required", []))

                # Get file upload fields from stored action data
                file_upload_fields = self._actions_data.get(action_key, {}).get("file_upload_fields", set())
                if attachment_related_fields:  # If we consolidated attachment fields
                    file_upload_fields = file_upload_fields | {"attachment"}

                # Identify top-level JSON parents (object/array) to render as single CodeInput
                top_props_for_json = set()
                props_dict = parameters_schema.get("properties", {}) if isinstance(parameters_schema, dict) else {}
                for top_name, top_schema in props_dict.items():
                    if isinstance(top_schema, dict) and top_schema.get("type") in {"object", "array"}:
                        top_props_for_json.add(top_name)

                for inp in result:
                    if hasattr(inp, "name") and inp.name is not None:
                        # Skip flattened subfields of JSON parents; handle array prefixes (e.g., parent[0].x)
                        raw_prefix = inp.name.split(".")[0]
                        base_prefix = raw_prefix.replace("[0]", "")
                        if base_prefix in top_props_for_json and ("." in inp.name or "[" in inp.name):
                            continue
                        # Check if this specific field is a file upload field
                        if inp.name.lower() in file_upload_fields or inp.name.lower() == "attachment":
                            # Replace with FileInput for file upload fields
                            file_input = FileInput(
                                name=inp.name,
                                display_name=getattr(inp, "display_name", inp.name.replace("_", " ").title()),
                                required=inp.name in required_fields_set,
                                advanced=inp.name not in required_fields_set,
                                info=getattr(inp, "info", "Upload file for this field"),
                                show=True,
                                file_types=[
                                    "csv",
                                    "txt",
                                    "doc",
                                    "docx",
                                    "xls",
                                    "xlsx",
                                    "pdf",
                                    "png",
                                    "jpg",
                                    "jpeg",
                                    "gif",
                                    "zip",
                                    "rar",
                                    "ppt",
                                    "pptx",
                                ],
                            )
                            processed_inputs.append(file_input)
                        else:
                            # Ensure proper display_name and info are set for regular fields
                            if not hasattr(inp, "display_name") or not inp.display_name:
                                inp.display_name = inp.name.replace("_", " ").title()

                            # Preserve description from schema if available
                            field_schema = flat_schema.get("properties", {}).get(inp.name, {})
                            schema_description = field_schema.get("description")
                            current_info = getattr(inp, "info", None)

                            # Use schema description if available, otherwise keep current info or create from name
                            if schema_description:
                                inp.info = schema_description
                            elif not current_info:
                                # Fallback: create a basic description from the field name if no description exists
                                inp.info = f"{inp.name.replace('_', ' ').title()} field"

                            # Set advanced status for non-file-upload fields
                            if inp.name not in required_fields_set:
                                inp.advanced = True

                            # Skip entity_id being mapped to user_id parameter
                            if inp.name == "user_id" and getattr(self, "entity_id", None) == getattr(
                                inp, "value", None
                            ):
                                continue

                            processed_inputs.append(inp)
                    else:
                        processed_inputs.append(inp)

                # Add single CodeInput for each JSON parent field
                props_dict = parameters_schema.get("properties", {}) if isinstance(parameters_schema, dict) else {}
                for top_name in top_props_for_json:
                    # Avoid duplicates if already present
                    if any(getattr(i, "name", None) == top_name for i in processed_inputs):
                        continue
                    top_schema = props_dict.get(top_name, {})
                    # For MultilineInput fields (complex JSON objects/arrays)
                    is_required = top_name in original_required
                    processed_inputs.append(
                        MultilineInput(
                            name=top_name,
                            display_name=top_schema.get("title") or top_name.replace("_", " ").title(),
                            info=(
                                top_schema.get("description") or "Provide JSON for this parameter (object or array)."
                            ),
                            required=is_required,  # Setting original schema
                        )
                    )

                return processed_inputs
            return result  # noqa: TRY300
        except ValueError as e:
            logger.warning(f"Error generating inputs for {action_key}: {e}")
            return []

    def _get_inputs_for_all_actions(self) -> dict[str, list[InputTypes]]:
        """Return a mapping action_key → list[InputTypes] for every action."""
        result: dict[str, list[InputTypes]] = {}
        for key in self._actions_data:
            result[key] = self._validate_schema_inputs(key)
        return result

    def _remove_inputs_from_build_config(self, build_config: dict, keep_for_action: str) -> None:
        """Remove parameter UI fields that belong to other actions."""
        protected_keys = {"code", "entity_id", "api_key", "auth_link", "action_button", "tool_mode"}

        for action_key, lf_inputs in self._get_inputs_for_all_actions().items():
            if action_key == keep_for_action:
                continue
            for inp in lf_inputs:
                if inp.name is not None and inp.name not in protected_keys:
                    build_config.pop(inp.name, None)

    def _update_action_config(self, build_config: dict, selected_value: Any) -> None:
        """Add or update parameter input fields for the chosen action."""
        if not selected_value:
            return

        # The UI passes either a list with dict [{name: display_name}] OR the raw key
        if isinstance(selected_value, list) and selected_value:
            display_name = selected_value[0]["name"]
        else:
            display_name = selected_value

        action_key = self.desanitize_action_name(display_name)

        # Skip validation for default/placeholder values
        if action_key in ("disabled", "placeholder", ""):
            logger.debug(f"Skipping action config update for placeholder value: {action_key}")
            return

        lf_inputs = self._validate_schema_inputs(action_key)

        # First remove inputs belonging to other actions
        self._remove_inputs_from_build_config(build_config, action_key)

        # Add / update the inputs for this action
        for inp in lf_inputs:
            if inp.name is not None:
                inp_dict = inp.to_dict() if hasattr(inp, "to_dict") else inp.__dict__.copy()

                # Do not mutate input_types here; keep original configuration

                inp_dict.setdefault("show", True)  # visible once action selected
                # Preserve previously entered value if user already filled something
                if inp.name in build_config:
                    existing_val = build_config[inp.name].get("value")
                    inp_dict.setdefault("value", existing_val)
                build_config[inp.name] = inp_dict

        # Ensure _all_fields includes new ones
        self._all_fields.update({i.name for i in lf_inputs if i.name is not None})

        # Normalize input_types to prevent None values
        self.update_input_types(build_config)

    def _is_tool_mode_enabled(self) -> bool:
        """Check if tool_mode is currently enabled."""
        return getattr(self, "tool_mode", False)

    def _set_action_visibility(self, build_config: dict, *, force_show: bool | None = None) -> None:
        """Set action field visibility based on tool_mode state or forced value."""
        if force_show is not None:
            build_config["action_button"]["show"] = force_show
        else:
            # When tool_mode is enabled, hide action field
            build_config["action_button"]["show"] = not self._is_tool_mode_enabled()

    def create_new_auth_config(self, app_name: str) -> str:
        """Create a new auth config for the given app name."""
        composio = self._build_wrapper()
        auth_config = composio.auth_configs.create(toolkit=app_name, options={"type": "use_composio_managed_auth"})
        return auth_config.id

    def _initiate_connection(self, app_name: str) -> tuple[str, str]:
        """Initiate connection using link method and return (redirect_url, connection_id)."""
        try:
            composio = self._build_wrapper()

            # Always create a new auth config (previous behavior)
            auth_config_id = self.create_new_auth_config(app_name)

            connection_request = composio.connected_accounts.link(user_id=self.entity_id, auth_config_id=auth_config_id)

            redirect_url = getattr(connection_request, "redirect_url", None)
            connection_id = getattr(connection_request, "id", None)

            if not redirect_url or not redirect_url.startswith(("http://", "https://")):
                msg = "Invalid redirect URL received from Composio"
                raise ValueError(msg)

            if not connection_id:
                msg = "No connection ID received from Composio"
                raise ValueError(msg)

            logger.info(f"Connection initiated for {app_name}: {redirect_url} (ID: {connection_id})")
            return redirect_url, connection_id  # noqa: TRY300

        except (ValueError, ConnectionError, TypeError, AttributeError) as e:
            logger.error(f"Error initiating connection for {app_name}: {e}")
            msg = f"Failed to initiate connection: {e}"
            raise ValueError(msg) from e

    def _check_connection_status_by_id(self, connection_id: str) -> str | None:
        """Check status of a specific connection by ID. Returns status or None if not found."""
        try:
            composio = self._build_wrapper()
            connection = composio.connected_accounts.get(nanoid=connection_id)
            status = getattr(connection, "status", None)
            logger.info(f"Connection {connection_id} status: {status}")
        except (ValueError, ConnectionError) as e:
            logger.error(f"Error checking connection {connection_id}: {e}")
            return None
        else:
            return status

    def _find_active_connection_for_app(self, app_name: str) -> tuple[str, str] | None:
        """Find any ACTIVE connection for this app/user. Returns (connection_id, status) or None."""
        try:
            composio = self._build_wrapper()
            connection_list = composio.connected_accounts.list(
                user_ids=[self.entity_id], toolkit_slugs=[app_name.lower()]
            )

            if connection_list and hasattr(connection_list, "items") and connection_list.items:
                for connection in connection_list.items:
                    connection_id = getattr(connection, "id", None)
                    connection_status = getattr(connection, "status", None)
                    if connection_status == "ACTIVE" and connection_id:
                        logger.info(f"Found existing ACTIVE connection for {app_name}: {connection_id}")
                        return connection_id, connection_status

        except (ValueError, ConnectionError) as e:
            logger.error(f"Error finding active connection for {app_name}: {e}")
            return None
        else:
            return None

    def _get_connection_auth_info(self, connection_id: str) -> tuple[str | None, bool | None]:
        """Return (auth_scheme, is_composio_managed) for a given connection id, if available."""
        try:
            composio = self._build_wrapper()
            connection = composio.connected_accounts.get(nanoid=connection_id)
            auth_config = getattr(connection, "auth_config", None)
            if auth_config is None and hasattr(connection, "__dict__"):
                auth_config = getattr(connection.__dict__, "auth_config", None)
            scheme = getattr(auth_config, "auth_scheme", None) if auth_config else None
            is_managed = getattr(auth_config, "is_composio_managed", None) if auth_config else None
        except (AttributeError, ValueError, ConnectionError, TypeError) as e:
            logger.debug(f"Could not retrieve auth info for connection {connection_id}: {e}")
            return None, None
        else:
            return scheme, is_managed

    def _disconnect_specific_connection(self, connection_id: str) -> None:
        """Disconnect a specific Composio connection by ID."""
        try:
            composio = self._build_wrapper()
            composio.connected_accounts.delete(nanoid=connection_id)
            logger.info(f"✅ Disconnected specific connection: {connection_id}")

        except Exception as e:
            logger.error(f"Error disconnecting connection {connection_id}: {e}")
            msg = f"Failed to disconnect connection {connection_id}: {e}"
            raise ValueError(msg) from e

    def _to_plain_dict(self, obj: Any) -> Any:
        """Recursively convert SDK models/lists to plain Python dicts/lists for safe .get access."""
        try:
            if isinstance(obj, dict):
                return {k: self._to_plain_dict(v) for k, v in obj.items()}
            if isinstance(obj, (list, tuple, set)):
                return [self._to_plain_dict(v) for v in obj]
            if hasattr(obj, "model_dump"):
                try:
                    return self._to_plain_dict(obj.model_dump())
                except (TypeError, AttributeError, ValueError):
                    pass
            if hasattr(obj, "__dict__") and not isinstance(obj, (str, bytes)):
                try:
                    return self._to_plain_dict({k: v for k, v in obj.__dict__.items() if not k.startswith("_")})
                except (TypeError, AttributeError, ValueError):
                    pass
        except (TypeError, ValueError, AttributeError, RecursionError):
            return obj
        else:
            return obj

    def _get_toolkit_schema(self) -> dict[str, Any] | None:
        """Fetch and cache toolkit schema for auth details (modes and fields)."""
        if self._toolkit_schema is not None:
            return self._toolkit_schema
        try:
            composio = self._build_wrapper()
            app_slug = getattr(self, "app_name", "").lower()
            if not app_slug:
                return None
            # Use the correct Composio SDK method
            schema = composio.toolkits.get(slug=app_slug)
            self._toolkit_schema = self._to_plain_dict(schema)
        except (AttributeError, ValueError, ConnectionError, TypeError) as e:
            logger.debug(f"Could not retrieve toolkit schema for {getattr(self, 'app_name', '')}: {e}")
            return None
        else:
            return self._toolkit_schema

    def _extract_auth_modes_from_schema(self, schema: dict[str, Any] | None) -> list[str]:
        """Return available auth modes (e.g., OAUTH2, API_KEY) from toolkit schema."""
        if not schema:
            return []
        modes: list[str] = []
        # composio_managed_auth_schemes: list[str]
        managed = schema.get("composio_managed_auth_schemes") or schema.get("composioManagedAuthSchemes") or []
        has_managed_schemes = isinstance(managed, list) and len(managed) > 0

        # Add "Composio_Managed" as first option if there are managed schemes
        if has_managed_schemes:
            modes.append("Composio_Managed")

        # auth_config_details: list with entries containing mode
        details = schema.get("auth_config_details") or schema.get("authConfigDetails") or []
        for item in details:
            mode = item.get("mode") or item.get("auth_method")
            if isinstance(mode, str) and mode not in modes:
                modes.append(mode)
        return modes

    def _render_auth_mode_dropdown(self, build_config: dict, modes: list[str]) -> None:
        """Populate and show the auth_mode control; if only one mode, show as selected chip-style list."""
        try:
            build_config.setdefault("auth_mode", {})
            auth_mode_cfg = build_config["auth_mode"]
            # Prefer the connected scheme if known; otherwise use schema-provided modes as-is
            stored_scheme = (build_config.get("auth_link") or {}).get("auth_scheme")
            if isinstance(stored_scheme, str) and stored_scheme:
                modes = [stored_scheme]

            if len(modes) <= 1:
                # Single mode → show a pill in the auth_mode slot (right after API Key)
                selected = modes[0] if modes else ""
                try:
                    pill = TabInput(
                        name="auth_mode",
                        display_name="Auth Mode",
                        options=[selected] if selected else [],
                        value=selected,
                    ).to_dict()
                    pill["show"] = True
                    build_config["auth_mode"] = pill
                except (TypeError, ValueError, AttributeError):
                    build_config["auth_mode"] = {
                        "name": "auth_mode",
                        "display_name": "Auth Mode",
                        "type": "tab",
                        "options": [selected],
                        "value": selected,
                        "show": True,
                    }
            else:
                # Multiple modes → normal dropdown, hide the display chip if present
                auth_mode_cfg["options"] = modes
                auth_mode_cfg["show"] = True
                if not auth_mode_cfg.get("value") and modes:
                    auth_mode_cfg["value"] = modes[0]
                if "auth_mode_display" in build_config:
                    build_config["auth_mode_display"]["show"] = False
                auth_mode_cfg["helper_text"] = "Choose how to authenticate with the toolkit."
        except (TypeError, ValueError, AttributeError) as e:
            logger.debug(f"Failed to render auth_mode dropdown: {e}")

    def _insert_field_before_action_button(self, build_config: dict, field_name: str, field_data: dict) -> None:
        """Insert a field in the correct position (before action_button) in build_config."""
        # If field already exists, don't add it again
        if field_name in build_config:
            return

        # If action_button doesn't exist, just add the field normally
        if "action_button" not in build_config:
            build_config[field_name] = field_data
            return

        # Find all the keys we need to preserve order for
        keys_before_action = []
        keys_after_action = []
        found_action = False

        for key in list(build_config.keys()):
            if key == "action_button":
                found_action = True
                keys_after_action.append(key)
            elif found_action:
                keys_after_action.append(key)
            else:
                keys_before_action.append(key)

        # Create new ordered dict
        new_config = {}

        # Add all fields before action_button
        for key in keys_before_action:
            new_config[key] = build_config[key]

        # Add the new field
        new_config[field_name] = field_data

        # Add action_button and all fields after it
        for key in keys_after_action:
            new_config[key] = build_config[key]

        # Clear and update build_config to maintain reference
        build_config.clear()
        build_config.update(new_config)

    def _clear_auth_dynamic_fields(self, build_config: dict) -> None:
        for fname in list(self._auth_dynamic_fields):
            if fname in build_config and isinstance(build_config[fname], dict):
                # Hide and reset instead of removing
                build_config[fname]["show"] = False
                build_config[fname]["value"] = ""
                build_config[fname]["required"] = False
        self._auth_dynamic_fields.clear()

    def _add_text_field(
        self,
        build_config: dict,
        name: str,
        display_name: str,
        info: str | None,
        *,
        required: bool,
        default_value: str | None = None,
    ) -> None:
        """Update existing field or add new text input for custom auth forms."""
        # Check if field already exists in build_config (pre-defined placeholder)
        if name in build_config:
            # Update existing field properties
            build_config[name]["display_name"] = display_name or name.replace("_", " ").title()
            build_config[name]["info"] = info or ""
            build_config[name]["required"] = required
            build_config[name]["show"] = True
            if default_value is not None and default_value != "":
                build_config[name]["value"] = default_value
        else:
            # Create new field if it doesn't exist
            # Use SecretStrInput for sensitive fields
            sensitive_fields = {
                "client_id",
                "client_secret",
                "api_key",
                "api_key_field",
                "generic_api_key",
                "token",
                "access_token",
                "refresh_token",
                "password",
                "bearer_token",
                "authorization_code",
            }

            if name in sensitive_fields:
                field = SecretStrInput(
                    name=name,
                    display_name=display_name or name.replace("_", " ").title(),
                    info=info or "",
                    required=required,
                    real_time_refresh=True,
                    show=True,
                ).to_dict()
            else:
                field = StrInput(
                    name=name,
                    display_name=display_name or name.replace("_", " ").title(),
                    info=info or "",
                    required=required,
                    real_time_refresh=True,
                    show=True,
                ).to_dict()

            if default_value is not None and default_value != "":
                field["value"] = default_value

            # Insert the field in the correct position (before action_button)
            self._insert_field_before_action_button(build_config, name, field)

        self._auth_dynamic_fields.add(name)
        # Also add to class-level cache for better tracking
        self.__class__.get_all_auth_field_names().add(name)

    def _render_custom_auth_fields(self, build_config: dict, schema: dict[str, Any], mode: str) -> None:
        """Render fields for custom auth based on schema auth_config_details sections."""
        details = schema.get("auth_config_details") or schema.get("authConfigDetails") or []
        selected = None
        for item in details:
            if (item.get("mode") or item.get("auth_method")) == mode:
                selected = item
                break
        if not selected:
            return
        fields = selected.get("fields") or {}

        # Helper function to process fields
        def process_fields(field_list: list, *, required: bool) -> None:
            for field in field_list:
                name = field.get("name")
                if not name:
                    continue
                # Skip Access Token field (bearer_token)
                if name == "bearer_token":
                    continue
                # Skip fields with default values for both required and optional fields
                default_val = field.get("default")
                if default_val is not None:
                    continue
                disp = field.get("display_name") or field.get("displayName") or name
                desc = field.get("description")
                self._add_text_field(build_config, name, disp, desc, required=required, default_value=default_val)

        # Only process AuthConfigCreation fields (for custom OAuth2, etc.)
        # Connection initiation fields are now handled on Composio page via link method
        creation = fields.get("auth_config_creation") or fields.get("authConfigCreation") or {}
        # Process required fields
        process_fields(creation.get("required", []), required=True)
        # Process optional fields (excluding those with defaults and bearer_token)
        process_fields(creation.get("optional", []), required=False)

    def _collect_all_auth_field_names(self, schema: dict[str, Any] | None) -> set[str]:
        names: set[str] = set()
        if not schema:
            return names
        details = schema.get("auth_config_details") or schema.get("authConfigDetails") or []
        for item in details:
            fields = (item.get("fields") or {}) if isinstance(item, dict) else {}
            for section_key in (
                "auth_config_creation",
                "authConfigCreation",
                "connected_account_initiation",
                "connectedAccountInitiation",
            ):
                section = fields.get(section_key) or {}
                for bucket in ("required", "optional"):
                    for entry in section.get(bucket, []) or []:
                        name = entry.get("name") if isinstance(entry, dict) else None
                        if name:
                            names.add(name)
                            # Add to class-level cache for tracking all discovered auth fields
                            self.__class__.get_all_auth_field_names().add(name)
        # Only use names discovered from the toolkit schema; do not add aliases
        return names

    def _clear_auth_fields_from_schema(self, build_config: dict, schema: dict[str, Any] | None) -> None:
        all_names = self._collect_all_auth_field_names(schema)
        for name in list(all_names):
            if name in build_config and isinstance(build_config[name], dict):
                # Hide and reset instead of removing to ensure UI updates immediately
                build_config[name]["show"] = False
                build_config[name]["value"] = ""
        # Also clear any tracked dynamic fields
        self._clear_auth_dynamic_fields(build_config)

    def update_input_types(self, build_config: dict) -> dict:
        """Normalize input_types to [] wherever None appears in the build_config template."""
        try:
            for key, value in list(build_config.items()):
                if isinstance(value, dict):
                    if value.get("input_types") is None:
                        build_config[key]["input_types"] = []
                elif hasattr(value, "input_types") and value.input_types is None:
                    with suppress(AttributeError, TypeError):
                        value.input_types = []
        except (RuntimeError, KeyError):
            pass
        return build_config

    def update_build_config(self, build_config: dict, field_value: Any, field_name: str | None = None) -> dict:
        """Update build config for auth and action selection."""
        # Avoid normalizing legacy input_types here; rely on upstream fixes

        # BULLETPROOF tool_mode checking - check all possible places where tool_mode could be stored
        instance_tool_mode = getattr(self, "tool_mode", False) if hasattr(self, "tool_mode") else False

        # Check build_config for tool_mode in multiple possible structures
        build_config_tool_mode = False
        if "tool_mode" in build_config:
            tool_mode_config = build_config["tool_mode"]
            if isinstance(tool_mode_config, dict):
                build_config_tool_mode = tool_mode_config.get("value", False)
            else:
                build_config_tool_mode = bool(tool_mode_config)

        # If this is a tool_mode change, update BOTH instance variable AND build_config
        if field_name == "tool_mode":
            self.tool_mode = field_value
            instance_tool_mode = field_value
            # CRITICAL: Store tool_mode state in build_config so it persists
            if "tool_mode" not in build_config:
                build_config["tool_mode"] = {}
            if isinstance(build_config["tool_mode"], dict):
                build_config["tool_mode"]["value"] = field_value
                build_config_tool_mode = field_value

        # Current tool_mode is True if ANY source indicates it's enabled
        current_tool_mode = instance_tool_mode or build_config_tool_mode or (field_name == "tool_mode" and field_value)

        # CRITICAL: Ensure dynamic action metadata is available whenever we have an API key
        # This must happen BEFORE any early returns to ensure tools are always loaded
        api_key_available = hasattr(self, "api_key") and self.api_key

        # Check if we need to populate actions - but also check cache availability
        actions_available = bool(self._actions_data)
        toolkit_slug = getattr(self, "app_name", "").lower()
        cached_actions_available = toolkit_slug in self.__class__.get_actions_cache()

        should_populate = False

        if (field_name == "api_key" and field_value) or (
            api_key_available and not actions_available and not cached_actions_available
        ):
            should_populate = True
        elif api_key_available and not actions_available and cached_actions_available:
            self._populate_actions_data()

        if should_populate:
            logger.info(f"Populating actions data for {getattr(self, 'app_name', 'unknown')}...")
            self._populate_actions_data()
            logger.info(f"Actions populated: {len(self._actions_data)} actions found")
            # Also fetch toolkit schema to drive auth UI
            schema = self._get_toolkit_schema()
            modes = self._extract_auth_modes_from_schema(schema)
            self._render_auth_mode_dropdown(build_config, modes)
            # If a mode is selected (including auto-default), render custom fields when not managed
            try:
                selected_mode = (build_config.get("auth_mode") or {}).get("value")
                managed = (schema or {}).get("composio_managed_auth_schemes") or []
                # Don't render custom fields if "Composio_Managed" is selected
                # For API_KEY and other token modes, no fields are needed as they use link method
                token_modes = ["API_KEY", "BEARER_TOKEN", "BASIC"]
                if selected_mode and selected_mode not in ["Composio_Managed", *token_modes]:
                    self._clear_auth_dynamic_fields(build_config)
                    self._render_custom_auth_fields(build_config, schema or {}, selected_mode)
                    # Already reordered in _render_custom_auth_fields
                elif selected_mode in token_modes:
                    # Clear any existing auth fields for token-based modes
                    self._clear_auth_dynamic_fields(build_config)
            except (TypeError, ValueError, AttributeError):
                pass

        # CRITICAL: Set action options if we have actions (either from fresh population or cache)
        if self._actions_data:
            self._build_action_maps()
            build_config["action_button"]["options"] = [
                {"name": self.sanitize_action_name(action), "metadata": action} for action in self._actions_data
            ]
            logger.info(f"Action options set in build_config: {len(build_config['action_button']['options'])} options")
            # Always (re)populate auth_mode as well when actions are available
            schema = self._get_toolkit_schema()
            modes = self._extract_auth_modes_from_schema(schema)
            self._render_auth_mode_dropdown(build_config, modes)
        else:
            build_config["action_button"]["options"] = []
            logger.warning("No actions found, setting empty options")

        # clear stored connection_id when api_key is changed
        if field_name == "api_key" and field_value:
            stored_connection_before = build_config.get("auth_link", {}).get("connection_id")
            if "auth_link" in build_config and "connection_id" in build_config["auth_link"]:
                build_config["auth_link"].pop("connection_id", None)
                build_config["auth_link"]["value"] = "connect"
                build_config["auth_link"]["auth_tooltip"] = "Connect"
                logger.info(f"Cleared stored connection_id '{stored_connection_before}' due to API key change")
            else:
                logger.info("DEBUG: EARLY No stored connection_id to clear on API key change")
            # Also clear any stored scheme and reset auth mode UI when API key changes
            build_config.setdefault("auth_link", {})
            build_config["auth_link"].pop("auth_scheme", None)
            build_config.setdefault("auth_mode", {})
            build_config["auth_mode"].pop("value", None)
            build_config["auth_mode"]["show"] = True
            # If auth_mode is currently a TabInput pill, convert it back to dropdown
            if isinstance(build_config.get("auth_mode"), dict) and build_config["auth_mode"].get("type") == "tab":
                build_config["auth_mode"].pop("type", None)
            # Re-render dropdown options for the new API key context
            try:
                schema = self._get_toolkit_schema()
                modes = self._extract_auth_modes_from_schema(schema)
                # Rebuild as DropdownInput to ensure proper rendering
                dd = DropdownInput(
                    name="auth_mode",
                    display_name="Auth Mode",
                    options=modes,
                    placeholder="Select auth mode",
                    toggle=True,
                    toggle_disable=True,
                    show=True,
                    real_time_refresh=True,
                    helper_text="Choose how to authenticate with the toolkit.",
                ).to_dict()
                build_config["auth_mode"] = dd
            except (TypeError, ValueError, AttributeError):
                pass
            # NEW: Clear any selected action and hide generated fields when API key is re-entered
            try:
                if "action_button" in build_config and isinstance(build_config["action_button"], dict):
                    build_config["action_button"]["value"] = "disabled"
                    self._hide_all_action_fields(build_config)
            except (TypeError, ValueError, AttributeError):
                pass

        # Handle disconnect operations when tool mode is enabled
        if field_name == "auth_link" and field_value == "disconnect":
            # Soft disconnect: do not delete remote account; only clear local state
            stored_connection_id = build_config.get("auth_link", {}).get("connection_id")
            if not stored_connection_id:
                logger.warning("No connection ID found to disconnect (soft)")
            build_config.setdefault("auth_link", {})
            build_config["auth_link"]["value"] = "connect"
            build_config["auth_link"]["auth_tooltip"] = "Connect"
            build_config["auth_link"].pop("connection_id", None)
            build_config["action_button"]["helper_text"] = "Please connect before selecting actions."
            build_config["action_button"]["helper_text_metadata"] = {"variant": "destructive"}
            return self.update_input_types(build_config)

        # Handle auth mode change -> render appropriate fields based on schema
        if field_name == "auth_mode":
            schema = self._get_toolkit_schema() or {}
            # Clear any previously rendered auth fields when switching modes
            self._clear_auth_fields_from_schema(build_config, schema)
            mode = field_value if isinstance(field_value, str) else (build_config.get("auth_mode", {}).get("value"))
            if not mode and isinstance(build_config.get("auth_mode"), dict):
                mode = build_config["auth_mode"].get("value")
            # Always show auth_link for any mode
            build_config.setdefault("auth_link", {})
            build_config["auth_link"]["show"] = False
            # Reset connection state when switching modes
            build_config["auth_link"].pop("connection_id", None)
            build_config["auth_link"].pop("auth_config_id", None)
            build_config["auth_link"]["value"] = "connect"
            build_config["auth_link"]["auth_tooltip"] = "Connect"
            # If an ACTIVE connection already exists, don't render any auth fields
            existing_active = self._find_active_connection_for_app(self.app_name)
            if existing_active:
                connection_id, _ = existing_active
                self._clear_auth_fields_from_schema(build_config, schema)
                build_config.setdefault("create_auth_config", {})
                build_config["create_auth_config"]["show"] = False
                build_config["auth_link"]["value"] = "validated"
                build_config["auth_link"]["auth_tooltip"] = "Disconnect"
                build_config["auth_link"]["connection_id"] = connection_id
                # Reflect the connected auth scheme in the UI
                scheme, _ = self._get_connection_auth_info(connection_id)
                if scheme:
                    build_config.setdefault("auth_link", {})
                    build_config["auth_link"]["auth_scheme"] = scheme
                    build_config.setdefault("auth_mode", {})
                    build_config["auth_mode"]["value"] = scheme
                    build_config["auth_mode"]["options"] = [scheme]
                    build_config["auth_mode"]["show"] = False
                    try:
                        pill = TabInput(
                            name="auth_mode",
                            display_name="Auth Mode",
                            options=[scheme],
                            value=scheme,
                        ).to_dict()
                        pill["show"] = True
                        build_config["auth_mode"] = pill
                    except (TypeError, ValueError, AttributeError):
                        build_config["auth_mode"] = {
                            "name": "auth_mode",
                            "display_name": "Auth Mode",
                            "type": "tab",
                            "options": [scheme],
                            "value": scheme,
                            "show": True,
                        }
                build_config["action_button"]["helper_text"] = ""
                build_config["action_button"]["helper_text_metadata"] = {}
                return self.update_input_types(build_config)
            if mode:
                managed = schema.get("composio_managed_auth_schemes") or []
                # Always hide the Create Auth Config control (used internally only)
                build_config.setdefault("create_auth_config", {})
                build_config["create_auth_config"]["show"] = False
                build_config["create_auth_config"]["display_name"] = ""
                build_config["create_auth_config"]["value"] = ""
                build_config["create_auth_config"]["helper_text"] = ""
                build_config["create_auth_config"]["options"] = ["create"]
                if mode == "Composio_Managed":
                    # Composio_Managed → no extra fields needed
                    pass
                elif mode in ["API_KEY", "BEARER_TOKEN", "BASIC"]:
                    # Token-based modes → no fields needed, user enters on Composio page via link
                    pass
                elif isinstance(managed, list) and mode in managed:
                    # This is a specific managed auth scheme (e.g., OAUTH2) but user can still choose custom
                    # So we should render custom fields for this mode
                    self._render_custom_auth_fields(build_config, schema, mode)
                    # Already reordered in _render_custom_auth_fields
                else:
                    # Custom → render only required fields based on the toolkit schema
                    self._render_custom_auth_fields(build_config, schema, mode)
                    # Already reordered in _render_custom_auth_fields
            return self.update_input_types(build_config)

        # Handle connection initiation when tool mode is enabled
        if field_name == "auth_link" and isinstance(field_value, dict):
            try:
                toolkit_slug = self.app_name.lower()

                # First check if we already have an ACTIVE connection
                existing_active = self._find_active_connection_for_app(self.app_name)
                if existing_active:
                    connection_id, _ = existing_active
                    build_config["auth_link"]["value"] = "validated"
                    build_config["auth_link"]["auth_tooltip"] = "Disconnect"
                    build_config["auth_link"]["connection_id"] = connection_id
                    build_config["action_button"]["helper_text"] = ""
                    build_config["action_button"]["helper_text_metadata"] = {}

                    # Clear auth fields when connected
                    schema = self._get_toolkit_schema()
                    self._clear_auth_fields_from_schema(build_config, schema)

                    # Convert auth_mode to pill for connected state
                    scheme, _ = self._get_connection_auth_info(connection_id)
                    if scheme:
                        build_config.setdefault("auth_mode", {})
                        build_config["auth_mode"]["value"] = scheme
                        build_config["auth_mode"]["options"] = [scheme]
                        build_config["auth_mode"]["show"] = False
                        try:
                            pill = TabInput(
                                name="auth_mode",
                                display_name="Auth Mode",
                                options=[scheme],
                                value=scheme,
                            ).to_dict()
                            pill["show"] = True
                            build_config["auth_mode"] = pill
                        except (TypeError, ValueError, AttributeError):
                            build_config["auth_mode"] = {
                                "name": "auth_mode",
                                "display_name": "Auth Mode",
                                "type": "tab",
                                "options": [scheme],
                                "value": scheme,
                                "show": True,
                            }

                    logger.info(f"Using existing ACTIVE connection {connection_id} for {toolkit_slug}")
                    return self.update_input_types(build_config)

                # Only reuse ACTIVE connections; otherwise create a new connection
                stored_connection_id = None

                # Create new connection ONLY if we truly have no usable connection yet
                if existing_active is None:
                    # Check if we already have a redirect URL in progress
                    current_auth_link_value = build_config.get("auth_link", {}).get("value", "")
                    if current_auth_link_value and current_auth_link_value.startswith(("http://", "https://")):
                        # We already have a redirect URL, don't create a new one
                        logger.info(f"Redirect URL already exists for {toolkit_slug}, skipping new creation")
|
|
1747
|
+
return self.update_input_types(build_config)
|
|
1748
|
+
|
|
1749
|
+
try:
|
|
1750
|
+
# Determine auth mode
|
|
1751
|
+
schema = self._get_toolkit_schema()
|
|
1752
|
+
mode = None
|
|
1753
|
+
if isinstance(build_config.get("auth_mode"), dict):
|
|
1754
|
+
mode = build_config["auth_mode"].get("value")
|
|
1755
|
+
# If no managed default exists (400 Default auth config), require mode selection
|
|
1756
|
+
managed = (schema or {}).get("composio_managed_auth_schemes") or []
|
|
1757
|
+
|
|
1758
|
+
# Handle "Composio_Managed" mode explicitly
|
|
1759
|
+
if mode == "Composio_Managed":
|
|
1760
|
+
# Use Composio_Managed auth flow
|
|
1761
|
+
redirect_url, connection_id = self._initiate_connection(toolkit_slug)
|
|
1762
|
+
build_config["auth_link"]["value"] = redirect_url
|
|
1763
|
+
logger.info(f"New OAuth URL created for {toolkit_slug}: {redirect_url}")
|
|
1764
|
+
return self.update_input_types(build_config)
|
|
1765
|
+
|
|
1766
|
+
if not mode:
|
|
1767
|
+
build_config["auth_link"]["value"] = "connect"
|
|
1768
|
+
build_config["auth_link"]["auth_tooltip"] = "Select Auth Mode"
|
|
1769
|
+
return self.update_input_types(build_config)
|
|
1770
|
+
# Custom modes: create auth config and/or initiate with config
|
|
1771
|
+
# Only validate auth_config_creation fields for OAUTH2
|
|
1772
|
+
required_missing = []
|
|
1773
|
+
if mode == "OAUTH2":
|
|
1774
|
+
req_names_pre = self._get_schema_field_names(
|
|
1775
|
+
schema,
|
|
1776
|
+
"OAUTH2",
|
|
1777
|
+
"auth_config_creation",
|
|
1778
|
+
"required",
|
|
1779
|
+
)
|
|
1780
|
+
for fname in req_names_pre:
|
|
1781
|
+
if fname in build_config:
|
|
1782
|
+
val = build_config[fname].get("value")
|
|
1783
|
+
if val in (None, ""):
|
|
1784
|
+
required_missing.append(fname)
|
|
1785
|
+
if required_missing:
|
|
1786
|
+
# Surface errors on each missing field
|
|
1787
|
+
for fname in required_missing:
|
|
1788
|
+
if fname in build_config and isinstance(build_config[fname], dict):
|
|
1789
|
+
build_config[fname]["helper_text"] = "This field is required"
|
|
1790
|
+
build_config[fname]["helper_text_metadata"] = {"variant": "destructive"}
|
|
1791
|
+
# Also reflect in info for guaranteed visibility
|
|
1792
|
+
existing_info = build_config[fname].get("info") or ""
|
|
1793
|
+
build_config[fname]["info"] = f"Required: {existing_info}".strip()
|
|
1794
|
+
build_config[fname]["show"] = True
|
|
1795
|
+
# Add a visible top-level hint near Auth Mode as well
|
|
1796
|
+
build_config.setdefault("auth_mode", {})
|
|
1797
|
+
missing_joined = ", ".join(required_missing)
|
|
1798
|
+
build_config["auth_mode"]["helper_text"] = f"Missing required: {missing_joined}"
|
|
1799
|
+
build_config["auth_mode"]["helper_text_metadata"] = {"variant": "destructive"}
|
|
1800
|
+
build_config["auth_link"]["value"] = "connect"
|
|
1801
|
+
build_config["auth_link"]["auth_tooltip"] = f"Missing: {missing_joined}"
|
|
1802
|
+
return self.update_input_types(build_config)
|
|
1803
|
+
composio = self._build_wrapper()
|
|
1804
|
+
if mode == "OAUTH2":
|
|
1805
|
+
# If an auth_config was already created via the button, use it and include initiation fields
|
|
1806
|
+
stored_ac_id = (build_config.get("auth_link") or {}).get("auth_config_id")
|
|
1807
|
+
if stored_ac_id:
|
|
1808
|
+
# Check if we already have a redirect URL to prevent duplicates
|
|
1809
|
+
current_link_value = build_config.get("auth_link", {}).get("value", "")
|
|
1810
|
+
if current_link_value and current_link_value.startswith(("http://", "https://")):
|
|
1811
|
+
logger.info(
|
|
1812
|
+
f"Redirect URL already exists for {toolkit_slug} OAUTH2, skipping new creation"
|
|
1813
|
+
)
|
|
1814
|
+
return self.update_input_types(build_config)
|
|
1815
|
+
|
|
1816
|
+
# Use link method - no need to collect connection initiation fields
|
|
1817
|
+
redirect = composio.connected_accounts.link(
|
|
1818
|
+
user_id=self.entity_id,
|
|
1819
|
+
auth_config_id=stored_ac_id,
|
|
1820
|
+
)
|
|
1821
|
+
redirect_url = getattr(redirect, "redirect_url", None)
|
|
1822
|
+
connection_id = getattr(redirect, "id", None)
|
|
1823
|
+
if redirect_url:
|
|
1824
|
+
build_config["auth_link"]["value"] = redirect_url
|
|
1825
|
+
if connection_id:
|
|
1826
|
+
build_config["auth_link"]["connection_id"] = connection_id
|
|
1827
|
+
# Clear action blocker text on successful initiation
|
|
1828
|
+
build_config["action_button"]["helper_text"] = ""
|
|
1829
|
+
build_config["action_button"]["helper_text_metadata"] = {}
|
|
1830
|
+
# Clear any auth fields
|
|
1831
|
+
schema = self._get_toolkit_schema()
|
|
1832
|
+
self._clear_auth_fields_from_schema(build_config, schema)
|
|
1833
|
+
return self.update_input_types(build_config)
|
|
1834
|
+
# Otherwise, create custom OAuth2 auth config using schema-declared required fields
|
|
1835
|
+
credentials = {}
|
|
1836
|
+
missing = []
|
|
1837
|
+
# Collect required names from schema
|
|
1838
|
+
req_names = self._get_schema_field_names(
|
|
1839
|
+
schema,
|
|
1840
|
+
"OAUTH2",
|
|
1841
|
+
"auth_config_creation",
|
|
1842
|
+
"required",
|
|
1843
|
+
)
|
|
1844
|
+
candidate_names = set(self._auth_dynamic_fields) | req_names
|
|
1845
|
+
for fname in candidate_names:
|
|
1846
|
+
if fname in build_config:
|
|
1847
|
+
val = build_config[fname].get("value")
|
|
1848
|
+
if val not in (None, ""):
|
|
1849
|
+
credentials[fname] = val
|
|
1850
|
+
else:
|
|
1851
|
+
missing.append(fname)
|
|
1852
|
+
# proceed even if missing optional; backend will validate
|
|
1853
|
+
# Check if we already have a redirect URL to prevent duplicates
|
|
1854
|
+
current_link_value = build_config.get("auth_link", {}).get("value", "")
|
|
1855
|
+
if current_link_value and current_link_value.startswith(("http://", "https://")):
|
|
1856
|
+
logger.info(
|
|
1857
|
+
f"Redirect URL already exists for {toolkit_slug} OAUTH2, skipping new creation"
|
|
1858
|
+
)
|
|
1859
|
+
return self.update_input_types(build_config)
|
|
1860
|
+
|
|
1861
|
+
ac = composio.auth_configs.create(
|
|
1862
|
+
toolkit=toolkit_slug,
|
|
1863
|
+
options={
|
|
1864
|
+
"type": "use_custom_auth",
|
|
1865
|
+
"auth_scheme": "OAUTH2",
|
|
1866
|
+
"credentials": credentials,
|
|
1867
|
+
},
|
|
1868
|
+
)
|
|
1869
|
+
auth_config_id = getattr(ac, "id", None)
|
|
1870
|
+
# Use link method directly - no need to check for connection initiation fields
|
|
1871
|
+
redirect = composio.connected_accounts.link(
|
|
1872
|
+
user_id=self.entity_id,
|
|
1873
|
+
auth_config_id=auth_config_id,
|
|
1874
|
+
)
|
|
1875
|
+
redirect_url = getattr(redirect, "redirect_url", None)
|
|
1876
|
+
connection_id = getattr(redirect, "id", None)
|
|
1877
|
+
if redirect_url:
|
|
1878
|
+
build_config["auth_link"]["value"] = redirect_url
|
|
1879
|
+
if connection_id:
|
|
1880
|
+
build_config["auth_link"]["connection_id"] = connection_id
|
|
1881
|
+
# Hide auth fields immediately after successful initiation
|
|
1882
|
+
schema = self._get_toolkit_schema()
|
|
1883
|
+
self._clear_auth_fields_from_schema(build_config, schema)
|
|
1884
|
+
build_config["action_button"]["helper_text"] = ""
|
|
1885
|
+
build_config["action_button"]["helper_text_metadata"] = {}
|
|
1886
|
+
return self.update_input_types(build_config)
|
|
1887
|
+
if mode == "API_KEY":
|
|
1888
|
+
# Check if we already have a redirect URL to prevent duplicates
|
|
1889
|
+
current_link_value = build_config.get("auth_link", {}).get("value", "")
|
|
1890
|
+
if current_link_value and current_link_value.startswith(("http://", "https://")):
|
|
1891
|
+
logger.info(
|
|
1892
|
+
f"Redirect URL already exists for {toolkit_slug} API_KEY, skipping new creation"
|
|
1893
|
+
)
|
|
1894
|
+
return self.update_input_types(build_config)
|
|
1895
|
+
|
|
1896
|
+
ac = composio.auth_configs.create(
|
|
1897
|
+
toolkit=toolkit_slug,
|
|
1898
|
+
options={"type": "use_custom_auth", "auth_scheme": "API_KEY", "credentials": {}},
|
|
1899
|
+
)
|
|
1900
|
+
auth_config_id = getattr(ac, "id", None)
|
|
1901
|
+
# Use link method - user will enter API key on Composio page
|
|
1902
|
+
initiation = composio.connected_accounts.link(
|
|
1903
|
+
user_id=self.entity_id,
|
|
1904
|
+
auth_config_id=auth_config_id,
|
|
1905
|
+
)
|
|
1906
|
+
connection_id = getattr(initiation, "id", None)
|
|
1907
|
+
redirect_url = getattr(initiation, "redirect_url", None)
|
|
1908
|
+
# API_KEY now also returns redirect URL with new link method
|
|
1909
|
+
if redirect_url:
|
|
1910
|
+
build_config["auth_link"]["value"] = redirect_url
|
|
1911
|
+
build_config["auth_link"]["auth_tooltip"] = "Disconnect"
|
|
1912
|
+
# Hide auth fields immediately after successful initiation
|
|
1913
|
+
schema = self._get_toolkit_schema()
|
|
1914
|
+
self._clear_auth_fields_from_schema(build_config, schema)
|
|
1915
|
+
build_config["action_button"]["helper_text"] = ""
|
|
1916
|
+
build_config["action_button"]["helper_text_metadata"] = {}
|
|
1917
|
+
|
|
1918
|
+
return self.update_input_types(build_config)
|
|
1919
|
+
# Generic custom auth flow for any other mode (treat like API_KEY)
|
|
1920
|
+
# Check if we already have a redirect URL to prevent duplicates
|
|
1921
|
+
current_link_value = build_config.get("auth_link", {}).get("value", "")
|
|
1922
|
+
if current_link_value and current_link_value.startswith(("http://", "https://")):
|
|
1923
|
+
logger.info(f"Redirect URL already exists for {toolkit_slug} {mode}, skipping new creation")
|
|
1924
|
+
return self.update_input_types(build_config)
|
|
1925
|
+
|
|
1926
|
+
ac = composio.auth_configs.create(
|
|
1927
|
+
toolkit=toolkit_slug,
|
|
1928
|
+
options={"type": "use_custom_auth", "auth_scheme": mode, "credentials": {}},
|
|
1929
|
+
)
|
|
1930
|
+
auth_config_id = getattr(ac, "id", None)
|
|
1931
|
+
# Use link method - user will enter required fields on Composio page
|
|
1932
|
+
initiation = composio.connected_accounts.link(
|
|
1933
|
+
user_id=self.entity_id,
|
|
1934
|
+
auth_config_id=auth_config_id,
|
|
1935
|
+
)
|
|
1936
|
+
connection_id = getattr(initiation, "id", None)
|
|
1937
|
+
redirect_url = getattr(initiation, "redirect_url", None)
|
|
1938
|
+
if redirect_url:
|
|
1939
|
+
build_config["auth_link"]["value"] = redirect_url
|
|
1940
|
+
build_config["auth_link"]["auth_tooltip"] = "Disconnect"
|
|
1941
|
+
# Clear auth fields
|
|
1942
|
+
schema = self._get_toolkit_schema()
|
|
1943
|
+
self._clear_auth_fields_from_schema(build_config, schema)
|
|
1944
|
+
build_config["action_button"]["helper_text"] = ""
|
|
1945
|
+
build_config["action_button"]["helper_text_metadata"] = {}
|
|
1946
|
+
return self.update_input_types(build_config)
|
|
1947
|
+
except (ValueError, ConnectionError, TypeError) as e:
|
|
1948
|
+
logger.error(f"Error creating connection: {e}")
|
|
1949
|
+
build_config["auth_link"]["value"] = "connect"
|
|
1950
|
+
build_config["auth_link"]["auth_tooltip"] = f"Error: {e!s}"
|
|
1951
|
+
else:
|
|
1952
|
+
return self.update_input_types(build_config)
|
|
1953
|
+
else:
|
|
1954
|
+
# We already have a usable connection; no new OAuth request
|
|
1955
|
+
build_config["auth_link"]["auth_tooltip"] = "Disconnect"
|
|
1956
|
+
|
|
1957
|
+
except (ValueError, ConnectionError) as e:
|
|
1958
|
+
logger.error(f"Error in connection initiation: {e}")
|
|
1959
|
+
build_config["auth_link"]["value"] = "connect"
|
|
1960
|
+
build_config["auth_link"]["auth_tooltip"] = f"Error: {e!s}"
|
|
1961
|
+
build_config["action_button"]["helper_text"] = "Please connect before selecting actions."
|
|
1962
|
+
build_config["action_button"]["helper_text_metadata"] = {"variant": "destructive"}
|
|
1963
|
+
return build_config
|
|
1964
|
+
|
|
1965
|
+
# Check for ACTIVE connections and update status accordingly (tool mode)
|
|
1966
|
+
if hasattr(self, "api_key") and self.api_key:
|
|
1967
|
+
stored_connection_id = build_config.get("auth_link", {}).get("connection_id")
|
|
1968
|
+
active_connection_id = None
|
|
1969
|
+
|
|
1970
|
+
# First try to check stored connection ID
|
|
1971
|
+
if stored_connection_id:
|
|
1972
|
+
status = self._check_connection_status_by_id(stored_connection_id)
|
|
1973
|
+
if status == "ACTIVE":
|
|
1974
|
+
active_connection_id = stored_connection_id
|
|
1975
|
+
|
|
1976
|
+
# If no stored connection or stored connection is not ACTIVE, find any ACTIVE connection
|
|
1977
|
+
if not active_connection_id:
|
|
1978
|
+
active_connection = self._find_active_connection_for_app(self.app_name)
|
|
1979
|
+
if active_connection:
|
|
1980
|
+
active_connection_id, _ = active_connection
|
|
1981
|
+
# Store the found active connection ID for future use
|
|
1982
|
+
if "auth_link" not in build_config:
|
|
1983
|
+
build_config["auth_link"] = {}
|
|
1984
|
+
build_config["auth_link"]["connection_id"] = active_connection_id
|
|
1985
|
+
|
|
1986
|
+
if active_connection_id:
|
|
1987
|
+
# Show validated connection status
|
|
1988
|
+
build_config["auth_link"]["value"] = "validated"
|
|
1989
|
+
build_config["auth_link"]["auth_tooltip"] = "Disconnect"
|
|
1990
|
+
build_config["auth_link"]["show"] = False
|
|
1991
|
+
# Update auth mode UI to reflect connected scheme
|
|
1992
|
+
scheme, _ = self._get_connection_auth_info(active_connection_id)
|
|
1993
|
+
if scheme:
|
|
1994
|
+
build_config.setdefault("auth_link", {})
|
|
1995
|
+
build_config["auth_link"]["auth_scheme"] = scheme
|
|
1996
|
+
build_config.setdefault("auth_mode", {})
|
|
1997
|
+
build_config["auth_mode"]["value"] = scheme
|
|
1998
|
+
build_config["auth_mode"]["options"] = [scheme]
|
|
1999
|
+
build_config["auth_mode"]["show"] = False
|
|
2000
|
+
try:
|
|
2001
|
+
pill = TabInput(
|
|
2002
|
+
name="auth_mode",
|
|
2003
|
+
display_name="Auth Mode",
|
|
2004
|
+
options=[scheme],
|
|
2005
|
+
value=scheme,
|
|
2006
|
+
).to_dict()
|
|
2007
|
+
pill["show"] = True
|
|
2008
|
+
build_config["auth_mode"] = pill
|
|
2009
|
+
except (TypeError, ValueError, AttributeError):
|
|
2010
|
+
build_config["auth_mode"] = {
|
|
2011
|
+
"name": "auth_mode",
|
|
2012
|
+
"display_name": "Auth Mode",
|
|
2013
|
+
"type": "tab",
|
|
2014
|
+
"options": [scheme],
|
|
2015
|
+
"value": scheme,
|
|
2016
|
+
"show": True,
|
|
2017
|
+
}
|
|
2018
|
+
build_config["action_button"]["helper_text"] = ""
|
|
2019
|
+
build_config["action_button"]["helper_text_metadata"] = {}
|
|
2020
|
+
# Clear any auth fields since we are already connected
|
|
2021
|
+
schema = self._get_toolkit_schema()
|
|
2022
|
+
self._clear_auth_fields_from_schema(build_config, schema)
|
|
2023
|
+
build_config.setdefault("create_auth_config", {})
|
|
2024
|
+
build_config["create_auth_config"]["show"] = False
|
|
2025
|
+
build_config["action_button"]["helper_text"] = ""
|
|
2026
|
+
build_config["action_button"]["helper_text_metadata"] = {}
|
|
2027
|
+
else:
|
|
2028
|
+
build_config["auth_link"]["value"] = "connect"
|
|
2029
|
+
build_config["auth_link"]["auth_tooltip"] = "Connect"
|
|
2030
|
+
build_config["action_button"]["helper_text"] = "Please connect before selecting actions."
|
|
2031
|
+
build_config["action_button"]["helper_text_metadata"] = {"variant": "destructive"}
|
|
2032
|
+
|
|
2033
|
+
# CRITICAL: If tool_mode is enabled from ANY source, hide action UI but keep auth flow available
|
|
2034
|
+
if current_tool_mode:
|
|
2035
|
+
build_config["action_button"]["show"] = False
|
|
2036
|
+
|
|
2037
|
+
# Hide ALL action parameter fields when tool mode is enabled
|
|
2038
|
+
for field in self._all_fields:
|
|
2039
|
+
if field in build_config:
|
|
2040
|
+
build_config[field]["show"] = False
|
|
2041
|
+
|
|
2042
|
+
# Also hide any other action-related fields that might be in build_config
|
|
2043
|
+
for field_name_in_config in build_config: # noqa: PLC0206
|
|
2044
|
+
# Skip base fields like api_key, tool_mode, action, etc., and dynamic auth fields
|
|
2045
|
+
if (
|
|
2046
|
+
field_name_in_config
|
|
2047
|
+
not in [
|
|
2048
|
+
"api_key",
|
|
2049
|
+
"tool_mode",
|
|
2050
|
+
"action_button",
|
|
2051
|
+
"auth_link",
|
|
2052
|
+
"entity_id",
|
|
2053
|
+
"auth_mode",
|
|
2054
|
+
"auth_mode_pill",
|
|
2055
|
+
]
|
|
2056
|
+
and field_name_in_config not in getattr(self, "_auth_dynamic_fields", set())
|
|
2057
|
+
and isinstance(build_config[field_name_in_config], dict)
|
|
2058
|
+
and "show" in build_config[field_name_in_config]
|
|
2059
|
+
):
|
|
2060
|
+
build_config[field_name_in_config]["show"] = False
|
|
2061
|
+
|
|
2062
|
+
# ENSURE tool_mode state is preserved in build_config for future calls
|
|
2063
|
+
if "tool_mode" not in build_config:
|
|
2064
|
+
build_config["tool_mode"] = {"value": True}
|
|
2065
|
+
elif isinstance(build_config["tool_mode"], dict):
|
|
2066
|
+
build_config["tool_mode"]["value"] = True
|
|
2067
|
+
# Keep auth UI available and render fields if needed
|
|
2068
|
+
build_config.setdefault("auth_link", {})
|
|
2069
|
+
build_config["auth_link"]["show"] = False
|
|
2070
|
+
build_config["auth_link"]["display_name"] = ""
|
|
2071
|
+
|
|
2072
|
+
# Only render auth fields if NOT already connected
|
|
2073
|
+
active_connection = self._find_active_connection_for_app(self.app_name)
|
|
2074
|
+
if not active_connection:
|
|
2075
|
+
try:
|
|
2076
|
+
schema = self._get_toolkit_schema()
|
|
2077
|
+
mode = (build_config.get("auth_mode") or {}).get("value")
|
|
2078
|
+
managed = (schema or {}).get("composio_managed_auth_schemes") or []
|
|
2079
|
+
token_modes = ["API_KEY", "BEARER_TOKEN", "BASIC"]
|
|
2080
|
+
if (
|
|
2081
|
+
mode
|
|
2082
|
+
and mode not in ["Composio_Managed", *token_modes]
|
|
2083
|
+
and not getattr(self, "_auth_dynamic_fields", set())
|
|
2084
|
+
):
|
|
2085
|
+
self._render_custom_auth_fields(build_config, schema or {}, mode)
|
|
2086
|
+
# Already reordered in _render_custom_auth_fields
|
|
2087
|
+
except (TypeError, ValueError, AttributeError):
|
|
2088
|
+
pass
|
|
2089
|
+
else:
|
|
2090
|
+
# If connected, clear any auth fields that might be showing
|
|
2091
|
+
self._clear_auth_dynamic_fields(build_config)
|
|
2092
|
+
# Do NOT return here; allow auth flow to run in Tool Mode
|
|
2093
|
+
|
|
2094
|
+
if field_name == "tool_mode":
|
|
2095
|
+
if field_value is True:
|
|
2096
|
+
build_config["action_button"]["show"] = False # Hide action field when tool mode is enabled
|
|
2097
|
+
for field in self._all_fields:
|
|
2098
|
+
build_config[field]["show"] = False # Update show status for all fields based on tool mode
|
|
2099
|
+
elif field_value is False:
|
|
2100
|
+
build_config["action_button"]["show"] = True # Show action field when tool mode is disabled
|
|
2101
|
+
for field in self._all_fields:
|
|
2102
|
+
build_config[field]["show"] = True # Update show status for all fields based on tool mode
|
|
2103
|
+
return self.update_input_types(build_config)
|
|
2104
|
+
|
|
2105
|
+
if field_name == "action_button":
|
|
2106
|
+
# If selection is cancelled/cleared, remove generated fields
|
|
2107
|
+
def _is_cleared(val: Any) -> bool:
|
|
2108
|
+
return (
|
|
2109
|
+
not val
|
|
2110
|
+
or (
|
|
2111
|
+
isinstance(val, list)
|
|
2112
|
+
and (len(val) == 0 or (len(val) > 0 and isinstance(val[0], dict) and not val[0].get("name")))
|
|
2113
|
+
)
|
|
2114
|
+
or (isinstance(val, str) and val in ("", "disabled", "placeholder"))
|
|
2115
|
+
)
|
|
2116
|
+
|
|
2117
|
+
if _is_cleared(field_value):
|
|
2118
|
+
self._hide_all_action_fields(build_config)
|
|
2119
|
+
return self.update_input_types(build_config)
|
|
2120
|
+
|
|
2121
|
+
self._update_action_config(build_config, field_value)
|
|
2122
|
+
# Keep the existing show/hide behaviour
|
|
2123
|
+
self.show_hide_fields(build_config, field_value)
|
|
2124
|
+
return self.update_input_types(build_config)
|
|
2125
|
+
|
|
2126
|
+
# Handle auth config button click
|
|
2127
|
+
if field_name == "create_auth_config" and field_value == "create":
|
|
2128
|
+
try:
|
|
2129
|
+
# Check if we already have a redirect URL to prevent duplicates
|
|
2130
|
+
current_link_value = build_config.get("auth_link", {}).get("value", "")
|
|
2131
|
+
if current_link_value and current_link_value.startswith(("http://", "https://")):
|
|
2132
|
+
logger.info("Redirect URL already exists, skipping new auth config creation")
|
|
2133
|
+
return self.update_input_types(build_config)
|
|
2134
|
+
|
|
2135
|
+
composio = self._build_wrapper()
|
|
2136
|
+
toolkit_slug = self.app_name.lower()
|
|
2137
|
+
schema = self._get_toolkit_schema() or {}
|
|
2138
|
+
# Collect required fields from the current build_config
|
|
2139
|
+
credentials = {}
|
|
2140
|
+
req_names = self._get_schema_field_names(schema, "OAUTH2", "auth_config_creation", "required")
|
|
2141
|
+
candidate_names = set(self._auth_dynamic_fields) | req_names
|
|
2142
|
+
for fname in candidate_names:
|
|
2143
|
+
if fname in build_config:
|
|
2144
|
+
val = build_config[fname].get("value")
|
|
2145
|
+
if val not in (None, ""):
|
|
2146
|
+
credentials[fname] = val
|
|
2147
|
+
# Create a new auth config using the collected credentials
|
|
2148
|
+
ac = composio.auth_configs.create(
|
|
2149
|
+
toolkit=toolkit_slug,
|
|
2150
|
+
options={"type": "use_custom_auth", "auth_scheme": "OAUTH2", "credentials": credentials},
|
|
2151
|
+
)
|
|
2152
|
+
auth_config_id = getattr(ac, "id", None)
|
|
2153
|
+
build_config.setdefault("auth_link", {})
|
|
2154
|
+
if auth_config_id:
|
|
2155
|
+
# Use link method directly - no need to check for connection initiation fields
|
|
2156
|
+
connection_request = composio.connected_accounts.link(
|
|
2157
|
+
user_id=self.entity_id, auth_config_id=auth_config_id
|
|
2158
|
+
)
|
|
2159
|
+
redirect_url = getattr(connection_request, "redirect_url", None)
|
|
2160
|
+
connection_id = getattr(connection_request, "id", None)
|
|
2161
|
+
if redirect_url and redirect_url.startswith(("http://", "https://")):
|
|
2162
|
+
build_config["auth_link"]["value"] = redirect_url
|
|
2163
|
+
build_config["auth_link"]["auth_tooltip"] = "Disconnect"
|
|
2164
|
+
build_config["auth_link"]["connection_id"] = connection_id
|
|
2165
|
+
build_config["action_button"]["helper_text"] = ""
|
|
2166
|
+
build_config["action_button"]["helper_text_metadata"] = {}
|
|
2167
|
+
logger.info(f"New OAuth URL created for {toolkit_slug}: {redirect_url}")
|
|
2168
|
+
else:
|
|
2169
|
+
logger.error(f"Failed to initiate connection with new auth config: {redirect_url}")
|
|
2170
|
+
build_config["auth_link"]["value"] = "error"
|
|
2171
|
+
build_config["auth_link"]["auth_tooltip"] = f"Error: {redirect_url}"
|
|
2172
|
+
else:
|
|
2173
|
+
logger.error(f"Failed to create new auth config for {toolkit_slug}")
|
|
2174
|
+
build_config["auth_link"]["value"] = "error"
|
|
2175
|
+
build_config["auth_link"]["auth_tooltip"] = "Create Auth Config failed"
|
|
2176
|
+
except (ValueError, ConnectionError, TypeError) as e:
|
|
2177
|
+
logger.error(f"Error creating new auth config: {e}")
|
|
2178
|
+
build_config["auth_link"]["value"] = "error"
|
|
2179
|
+
build_config["auth_link"]["auth_tooltip"] = f"Error: {e!s}"
|
|
2180
|
+
return self.update_input_types(build_config)
|
|
2181
|
+
|
|
2182
|
+
# Handle API key removal
|
|
2183
|
+
if field_name == "api_key" and len(field_value) == 0:
|
|
2184
|
+
build_config["auth_link"]["value"] = ""
|
|
2185
|
+
build_config["auth_link"]["auth_tooltip"] = "Please provide a valid Composio API Key."
|
|
2186
|
+
build_config["action_button"]["options"] = []
|
|
2187
|
+
build_config["action_button"]["helper_text"] = "Please connect before selecting actions."
|
|
2188
|
+
build_config["action_button"]["helper_text_metadata"] = {"variant": "destructive"}
|
|
2189
|
+
build_config.setdefault("auth_link", {})
|
|
2190
|
+
build_config["auth_link"].pop("connection_id", None)
|
|
2191
|
+
build_config["auth_link"].pop("auth_scheme", None)
|
|
2192
|
+
# Restore auth_mode dropdown and hide pill
|
|
2193
|
+
try:
|
|
2194
|
+
dd = DropdownInput(
|
|
2195
|
+
name="auth_mode",
|
|
2196
|
+
display_name="Auth Mode",
|
|
2197
|
+
options=[],
|
|
2198
|
+
placeholder="Select auth mode",
|
|
2199
|
+
toggle=True,
|
|
2200
|
+
toggle_disable=True,
|
|
2201
|
+
show=True,
|
|
2202
|
+
real_time_refresh=True,
|
|
2203
|
+
helper_text="Choose how to authenticate with the toolkit.",
|
|
2204
|
+
).to_dict()
|
|
2205
|
+
build_config["auth_mode"] = dd
|
|
2206
|
+
except (TypeError, ValueError, AttributeError):
|
|
2207
|
+
build_config.setdefault("auth_mode", {})
|
|
2208
|
+
build_config["auth_mode"]["show"] = True
|
|
2209
|
+
build_config["auth_mode"].pop("value", None)
|
|
2210
|
+
# NEW: Clear any selected action and hide generated fields when API key is cleared
|
|
2211
|
+
try:
|
|
2212
|
+
if "action_button" in build_config and isinstance(build_config["action_button"], dict):
|
|
2213
|
+
build_config["action_button"]["value"] = "disabled"
|
|
2214
|
+
self._hide_all_action_fields(build_config)
|
|
2215
|
+
except (TypeError, ValueError, AttributeError):
|
|
2216
|
+
pass
|
|
2217
|
+
return self.update_input_types(build_config)
|
|
2218
|
+
|
|
2219
|
+
# Only proceed with connection logic if we have an API key
|
|
2220
|
+
if not hasattr(self, "api_key") or not self.api_key:
|
|
2221
|
+
return self.update_input_types(build_config)
|
|
2222
|
+
|
|
2223
|
+
# CRITICAL: If tool_mode is enabled (check both instance and build_config), skip all connection logic
|
|
2224
|
+
if current_tool_mode:
|
|
2225
|
+
build_config["action_button"]["show"] = False
|
|
2226
|
+
return self.update_input_types(build_config)
|
|
2227
|
+
|
|
2228
|
+
# Update action options only if tool_mode is disabled
|
|
2229
|
+
self._build_action_maps()
|
|
2230
|
+
# Only set options if they haven't been set already during action population
|
|
2231
|
+
if "options" not in build_config.get("action_button", {}) or not build_config["action_button"]["options"]:
|
|
2232
|
+
build_config["action_button"]["options"] = [
|
|
2233
|
+
{"name": self.sanitize_action_name(action), "metadata": action} for action in self._actions_data
|
|
2234
|
+
]
|
|
2235
|
+
logger.debug("Setting action options from main logic path")
|
|
2236
|
+
else:
|
|
2237
|
+
logger.debug("Action options already set, skipping duplicate setting")
|
|
2238
|
+
# Only set show=True if tool_mode is not enabled
|
|
2239
|
+
if not current_tool_mode:
|
|
2240
|
+
build_config["action_button"]["show"] = True
|
|
2241
|
+
|
|
2242
|
+
stored_connection_id = build_config.get("auth_link", {}).get("connection_id")
|
|
2243
|
+
active_connection_id = None
|
|
2244
|
+
|
|
2245
|
+
if stored_connection_id:
|
|
2246
|
+
status = self._check_connection_status_by_id(stored_connection_id)
|
|
2247
|
+
if status == "ACTIVE":
|
|
2248
|
+
active_connection_id = stored_connection_id
|
|
2249
|
+
|
|
2250
|
+
if not active_connection_id:
|
|
2251
|
+
active_connection = self._find_active_connection_for_app(self.app_name)
|
|
2252
|
+
if active_connection:
|
|
2253
|
+
active_connection_id, _ = active_connection
|
|
2254
|
+
if "auth_link" not in build_config:
|
|
2255
|
+
build_config["auth_link"] = {}
|
|
2256
|
+
build_config["auth_link"]["connection_id"] = active_connection_id
|
|
2257
|
+
|
|
2258
|
+
if active_connection_id:
|
|
2259
|
+
build_config["auth_link"]["value"] = "validated"
|
|
2260
|
+
build_config["auth_link"]["auth_tooltip"] = "Disconnect"
|
|
2261
|
+
build_config["action_button"]["helper_text"] = ""
|
|
2262
|
+
build_config["action_button"]["helper_text_metadata"] = {}
|
|
2263
|
+
|
|
2264
|
+
# Clear auth fields when connected
|
|
2265
|
+
schema = self._get_toolkit_schema()
|
|
2266
|
+
self._clear_auth_fields_from_schema(build_config, schema)
|
|
2267
|
+
|
|
2268
|
+
# Convert auth_mode to pill for connected state
|
|
2269
|
+
scheme, _ = self._get_connection_auth_info(active_connection_id)
|
|
2270
|
+
if scheme:
|
|
2271
|
+
build_config.setdefault("auth_mode", {})
|
|
2272
|
+
build_config["auth_mode"]["value"] = scheme
|
|
2273
|
+
build_config["auth_mode"]["options"] = [scheme]
|
|
2274
|
+
build_config["auth_mode"]["show"] = False
|
|
2275
|
+
try:
|
|
2276
|
+
pill = TabInput(
|
|
2277
|
+
name="auth_mode",
|
|
2278
|
+
display_name="Auth Mode",
|
|
2279
|
+
options=[scheme],
|
|
2280
|
+
value=scheme,
|
|
2281
|
+
).to_dict()
|
|
2282
|
+
pill["show"] = True
|
|
2283
|
+
build_config["auth_mode"] = pill
|
|
2284
|
+
except (TypeError, ValueError, AttributeError):
|
|
2285
|
+
build_config["auth_mode"] = {
|
|
2286
|
+
"name": "auth_mode",
|
|
2287
|
+
"display_name": "Auth Mode",
|
|
2288
|
+
"type": "tab",
|
|
2289
|
+
"options": [scheme],
|
|
2290
|
+
"value": scheme,
|
|
2291
|
+
"show": True,
|
|
2292
|
+
}
|
|
2293
|
+
elif stored_connection_id:
|
|
2294
|
+
status = self._check_connection_status_by_id(stored_connection_id)
|
|
2295
|
+
if status == "INITIATED":
|
|
2296
|
+
current_value = build_config.get("auth_link", {}).get("value")
|
|
2297
|
+
if not current_value or current_value == "connect":
|
|
2298
|
+
build_config["auth_link"]["value"] = "connect"
|
|
2299
|
+
build_config["auth_link"]["auth_tooltip"] = "Connect"
|
|
2300
|
+
build_config["action_button"]["helper_text"] = "Please connect before selecting actions."
|
|
2301
|
+
build_config["action_button"]["helper_text_metadata"] = {"variant": "destructive"}
|
|
2302
|
+
else:
|
|
2303
|
+
# Connection not found or other status
|
|
2304
|
+
build_config["auth_link"]["value"] = "connect"
|
|
2305
|
+
build_config["auth_link"]["auth_tooltip"] = "Connect"
|
|
2306
|
+
build_config["action_button"]["helper_text"] = "Please connect before selecting actions."
|
|
2307
|
+
build_config["action_button"]["helper_text_metadata"] = {"variant": "destructive"}
|
|
2308
|
+
else:
|
|
2309
|
+
build_config["auth_link"]["value"] = "connect"
|
|
2310
|
+
build_config["auth_link"]["auth_tooltip"] = "Connect"
|
|
2311
|
+
build_config["action_button"]["helper_text"] = "Please connect before selecting actions."
|
|
2312
|
+
build_config["action_button"]["helper_text_metadata"] = {"variant": "destructive"}
|
|
2313
|
+
|
|
2314
|
+
if self._is_tool_mode_enabled():
|
|
2315
|
+
build_config["action_button"]["show"] = False
|
|
2316
|
+
|
|
2317
|
+
return self.update_input_types(build_config)
|
|
2318
|
+
|
|
2319
|
+
def configure_tools(self, composio: Composio, limit: int | None = None) -> list[Tool]:
|
|
2320
|
+
if limit is None:
|
|
2321
|
+
limit = 999
|
|
2322
|
+
|
|
2323
|
+
tools = composio.tools.get(user_id=self.entity_id, toolkits=[self.app_name.lower()], limit=limit)
|
|
2324
|
+
configured_tools = []
|
|
2325
|
+
for tool in tools:
|
|
2326
|
+
# Set the sanitized name
|
|
2327
|
+
display_name = self._actions_data.get(tool.name, {}).get(
|
|
2328
|
+
"display_name", self._sanitized_names.get(tool.name, self._name_sanitizer.sub("-", tool.name))
|
|
2329
|
+
)
|
|
2330
|
+
# Set the tags
|
|
2331
|
+
tool.tags = [tool.name]
|
|
2332
|
+
tool.metadata = {"display_name": display_name, "display_description": tool.description, "readonly": True}
|
|
2333
|
+
configured_tools.append(tool)
|
|
2334
|
+
return configured_tools
|
|
2335
|
+
|
|
2336
|
+
async def _get_tools(self) -> list[Tool]:
|
|
2337
|
+
"""Get tools with cached results and optimized name sanitization."""
|
|
2338
|
+
composio = self._build_wrapper()
|
|
2339
|
+
self.set_default_tools()
|
|
2340
|
+
return self.configure_tools(composio)
|
|
2341
|
+
|
|
2342
|
+
@property
|
|
2343
|
+
def enabled_tools(self):
|
|
2344
|
+
"""Return tag names for actions of this app that should be exposed to the agent.
|
|
2345
|
+
|
|
2346
|
+
If default tools are set via set_default_tools(), returns those.
|
|
2347
|
+
Otherwise, returns only the first few tools (limited by default_tools_limit)
|
|
2348
|
+
to prevent overwhelming the agent. Subclasses can override this behavior.
|
|
2349
|
+
|
|
2350
|
+
"""
|
|
2351
|
+
if not self._actions_data:
|
|
2352
|
+
self._populate_actions_data()
|
|
2353
|
+
|
|
2354
|
+
if hasattr(self, "_default_tools") and self._default_tools:
|
|
2355
|
+
return list(self._default_tools)
|
|
2356
|
+
|
|
2357
|
+
all_tools = list(self._actions_data.keys())
|
|
2358
|
+
limit = getattr(self, "default_tools_limit", 5)
|
|
2359
|
+
return all_tools[:limit]
|
|
2360
|
+
|
|
2361
|
+
def execute_action(self):
|
|
2362
|
+
"""Execute the selected Composio tool."""
|
|
2363
|
+
# Check if we're in Astra cloud environment and raise an error if we are.
|
|
2364
|
+
raise_error_if_astra_cloud_disable_component(disable_component_in_astra_cloud_msg)
|
|
2365
|
+
composio = self._build_wrapper()
|
|
2366
|
+
self._populate_actions_data()
|
|
2367
|
+
self._build_action_maps()
|
|
2368
|
+
|
|
2369
|
+
display_name = (
|
|
2370
|
+
self.action_button[0]["name"]
|
|
2371
|
+
if isinstance(getattr(self, "action_button", None), list) and self.action_button
|
|
2372
|
+
else self.action_button
|
|
2373
|
+
)
|
|
2374
|
+
action_key = self._display_to_key_map.get(display_name)
|
|
2375
|
+
|
|
2376
|
+
if not action_key:
|
|
2377
|
+
msg = f"Invalid action: {display_name}"
|
|
2378
|
+
raise ValueError(msg)
|
|
2379
|
+
|
|
2380
|
+
try:
|
|
2381
|
+
arguments: dict[str, Any] = {}
|
|
2382
|
+
param_fields = self._actions_data.get(action_key, {}).get("action_fields", [])
|
|
2383
|
+
|
|
2384
|
+
schema_dict = self._action_schemas.get(action_key, {})
|
|
2385
|
+
parameters_schema = schema_dict.get("input_parameters", {})
|
|
2386
|
+
schema_properties = parameters_schema.get("properties", {}) if parameters_schema else {}
|
|
2387
|
+
# Handle case where 'required' field is None (causes "'NoneType' object is not iterable")
|
|
2388
|
+
required_list = parameters_schema.get("required", []) if parameters_schema else []
|
|
2389
|
+
required_fields = set(required_list) if required_list is not None else set()
|
|
2390
|
+
|
|
2391
|
+
for field in param_fields:
|
|
2392
|
+
if not hasattr(self, field):
|
|
2393
|
+
continue
|
|
2394
|
+
value = getattr(self, field)
|
|
2395
|
+
|
|
2396
|
+
# Skip None, empty strings, and empty lists
|
|
2397
|
+
if value is None or value == "" or (isinstance(value, list) and len(value) == 0):
|
|
2398
|
+
continue
|
|
2399
|
+
|
|
2400
|
+
# Determine schema for this field
|
|
2401
|
+
prop_schema = schema_properties.get(field, {})
|
|
2402
|
+
|
|
2403
|
+
# Parse JSON for object/array string inputs (applies to required and optional)
|
|
2404
|
+
if isinstance(value, str) and prop_schema.get("type") in {"array", "object"}:
|
|
2405
|
+
try:
|
|
2406
|
+
value = json.loads(value)
|
|
2407
|
+
except json.JSONDecodeError:
|
|
2408
|
+
# Fallback for simple arrays of primitives
|
|
2409
|
+
if prop_schema.get("type") == "array":
|
|
2410
|
+
value = [item.strip() for item in value.split(",") if item.strip() != ""]
|
|
2411
|
+
|
|
2412
|
+
# For optional fields, be more strict about including them
|
|
2413
|
+
# Only include if the user has explicitly provided a meaningful value
|
|
2414
|
+
if field not in required_fields:
|
|
2415
|
+
# Compare against schema default after normalization
|
|
2416
|
+
schema_default = prop_schema.get("default")
|
|
2417
|
+
if value == schema_default:
|
|
2418
|
+
continue
|
|
2419
|
+
|
|
2420
|
+
if field in self._bool_variables:
|
|
2421
|
+
value = bool(value)
|
|
2422
|
+
|
|
2423
|
+
# Handle renamed fields - map back to original names for API execution
|
|
2424
|
+
final_field_name = field
|
|
2425
|
+
if field.endswith("_user_id") and field.startswith(self.app_name):
|
|
2426
|
+
final_field_name = "user_id"
|
|
2427
|
+
elif field == f"{self.app_name}_status":
|
|
2428
|
+
final_field_name = "status"
|
|
2429
|
+
elif field == f"{self.app_name}_name":
|
|
2430
|
+
final_field_name = "name"
|
|
2431
|
+
|
|
2432
|
+
arguments[final_field_name] = value
|
|
2433
|
+
|
|
2434
|
+
# Get the version from the action data
|
|
2435
|
+
version = self._actions_data.get(action_key, {}).get("version")
|
|
2436
|
+
if version:
|
|
2437
|
+
logger.info(f"Executing {action_key} with version: {version}")
|
|
2438
|
+
|
|
2439
|
+
# Execute using new SDK with version parameter
|
|
2440
|
+
execute_params = {
|
|
2441
|
+
"slug": action_key,
|
|
2442
|
+
"arguments": arguments,
|
|
2443
|
+
"user_id": self.entity_id,
|
|
2444
|
+
}
|
|
2445
|
+
|
|
2446
|
+
# Only add version if it's available
|
|
2447
|
+
if version:
|
|
2448
|
+
execute_params["version"] = version
|
|
2449
|
+
|
|
2450
|
+
result = composio.tools.execute(**execute_params)
|
|
2451
|
+
|
|
2452
|
+
if isinstance(result, dict) and "successful" in result:
|
|
2453
|
+
if result["successful"]:
|
|
2454
|
+
raw_data = result.get("data", result)
|
|
2455
|
+
return self._apply_post_processor(action_key, raw_data)
|
|
2456
|
+
error_msg = result.get("error", "Tool execution failed")
|
|
2457
|
+
raise ValueError(error_msg)
|
|
2458
|
+
|
|
2459
|
+
except ValueError as e:
|
|
2460
|
+
logger.error(f"Failed to execute {action_key}: {e}")
|
|
2461
|
+
raise
|
|
2462
|
+
|
|
2463
|
+
def _apply_post_processor(self, action_key: str, raw_data: Any) -> Any:
|
|
2464
|
+
"""Apply post-processor for the given action if defined."""
|
|
2465
|
+
if hasattr(self, "post_processors") and isinstance(self.post_processors, dict):
|
|
2466
|
+
processor_func = self.post_processors.get(action_key)
|
|
2467
|
+
if processor_func and callable(processor_func):
|
|
2468
|
+
try:
|
|
2469
|
+
return processor_func(raw_data)
|
|
2470
|
+
except (TypeError, ValueError, KeyError) as e:
|
|
2471
|
+
logger.error(f"Error in post-processor for {action_key}: {e} (Exception type: {type(e).__name__})")
|
|
2472
|
+
return raw_data
|
|
2473
|
+
|
|
2474
|
+
return raw_data
|
|
2475
|
+
|
|
2476
|
+
def set_default_tools(self):
|
|
2477
|
+
"""Set the default tools."""
|
|
2478
|
+
|
|
2479
|
+
def _get_schema_field_names(
|
|
2480
|
+
self,
|
|
2481
|
+
schema: dict[str, Any] | None,
|
|
2482
|
+
mode: str,
|
|
2483
|
+
section_kind: str,
|
|
2484
|
+
bucket: str,
|
|
2485
|
+
) -> set[str]:
|
|
2486
|
+
names: set[str] = set()
|
|
2487
|
+
if not schema:
|
|
2488
|
+
return names
|
|
2489
|
+
details = schema.get("auth_config_details") or schema.get("authConfigDetails") or []
|
|
2490
|
+
for item in details:
|
|
2491
|
+
if (item.get("mode") or item.get("auth_method")) != mode:
|
|
2492
|
+
continue
|
|
2493
|
+
fields = item.get("fields") or {}
|
|
2494
|
+
section = (
|
|
2495
|
+
fields.get(section_kind)
|
|
2496
|
+
or fields.get(
|
|
2497
|
+
"authConfigCreation" if section_kind == "auth_config_creation" else "connectedAccountInitiation"
|
|
2498
|
+
)
|
|
2499
|
+
or {}
|
|
2500
|
+
)
|
|
2501
|
+
for entry in section.get(bucket, []) or []:
|
|
2502
|
+
name = entry.get("name") if isinstance(entry, dict) else None
|
|
2503
|
+
if name:
|
|
2504
|
+
names.add(name)
|
|
2505
|
+
return names
|
|
2506
|
+
|
|
2507
|
+
def _get_schema_required_entries(
|
|
2508
|
+
self,
|
|
2509
|
+
schema: dict[str, Any] | None,
|
|
2510
|
+
mode: str,
|
|
2511
|
+
section_kind: str,
|
|
2512
|
+
) -> list[dict[str, Any]]:
|
|
2513
|
+
if not schema:
|
|
2514
|
+
return []
|
|
2515
|
+
details = schema.get("auth_config_details") or schema.get("authConfigDetails") or []
|
|
2516
|
+
for item in details:
|
|
2517
|
+
if (item.get("mode") or item.get("auth_method")) != mode:
|
|
2518
|
+
continue
|
|
2519
|
+
fields = item.get("fields") or {}
|
|
2520
|
+
section = (
|
|
2521
|
+
fields.get(section_kind)
|
|
2522
|
+
or fields.get(
|
|
2523
|
+
"authConfigCreation" if section_kind == "auth_config_creation" else "connectedAccountInitiation"
|
|
2524
|
+
)
|
|
2525
|
+
or {}
|
|
2526
|
+
)
|
|
2527
|
+
req = section.get("required", []) or []
|
|
2528
|
+
# Normalize dict-like entries
|
|
2529
|
+
return [entry for entry in req if isinstance(entry, dict)]
|
|
2530
|
+
return []
|
|
2531
|
+
|
|
2532
|
+
def _hide_all_action_fields(self, build_config: dict) -> None:
|
|
2533
|
+
"""Hide and reset all action parameter inputs, regardless of trace flags."""
|
|
2534
|
+
# Hide known action fields
|
|
2535
|
+
for fname in list(self._all_fields):
|
|
2536
|
+
if fname in build_config and isinstance(build_config[fname], dict):
|
|
2537
|
+
build_config[fname]["show"] = False
|
|
2538
|
+
build_config[fname]["value"] = "" if fname not in self._bool_variables else False
|
|
2539
|
+
# Hide any other visible, non-protected fields that look like parameters
|
|
2540
|
+
protected = {
|
|
2541
|
+
"code",
|
|
2542
|
+
"entity_id",
|
|
2543
|
+
"api_key",
|
|
2544
|
+
"auth_link",
|
|
2545
|
+
"action_button",
|
|
2546
|
+
"tool_mode",
|
|
2547
|
+
"auth_mode",
|
|
2548
|
+
"auth_mode_pill",
|
|
2549
|
+
"create_auth_config",
|
|
2550
|
+
# Pre-defined auth fields
|
|
2551
|
+
"client_id",
|
|
2552
|
+
"client_secret",
|
|
2553
|
+
"verification_token",
|
|
2554
|
+
"redirect_uri",
|
|
2555
|
+
"authorization_url",
|
|
2556
|
+
"token_url",
|
|
2557
|
+
"api_key_field",
|
|
2558
|
+
"generic_api_key",
|
|
2559
|
+
"token",
|
|
2560
|
+
"access_token",
|
|
2561
|
+
"refresh_token",
|
|
2562
|
+
"username",
|
|
2563
|
+
"password",
|
|
2564
|
+
"domain",
|
|
2565
|
+
"base_url",
|
|
2566
|
+
"bearer_token",
|
|
2567
|
+
"authorization_code",
|
|
2568
|
+
"scopes",
|
|
2569
|
+
"subdomain",
|
|
2570
|
+
"instance_url",
|
|
2571
|
+
"tenant_id",
|
|
2572
|
+
}
|
|
2573
|
+
# Add all dynamic auth fields to protected set
|
|
2574
|
+
protected.update(self._auth_dynamic_fields)
|
|
2575
|
+
# Also protect any auth fields discovered across all instances
|
|
2576
|
+
protected.update(self.__class__.get_all_auth_field_names())
|
|
2577
|
+
|
|
2578
|
+
for key, cfg in list(build_config.items()):
|
|
2579
|
+
if key in protected:
|
|
2580
|
+
continue
|
|
2581
|
+
if isinstance(cfg, dict) and "show" in cfg:
|
|
2582
|
+
cfg["show"] = False
|
|
2583
|
+
if "value" in cfg:
|
|
2584
|
+
cfg["value"] = ""
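For reference, the `_get_schema_field_names` helper above walks a nested `auth_config_details` structure returned by the toolkit schema. The following standalone sketch is not part of the package: the example schema and the `client_id`/`client_secret` field names are hypothetical, and the traversal is restated as a plain function purely to illustrate the shape of data the helper expects.

# Illustrative sketch only; assumes a toolkit schema shaped like the one
# _get_schema_field_names() consumes. Example field names are hypothetical.
from typing import Any

example_schema: dict[str, Any] = {
    "auth_config_details": [
        {
            "mode": "OAUTH2",
            "fields": {
                "auth_config_creation": {
                    "required": [{"name": "client_id"}, {"name": "client_secret"}],
                    "optional": [{"name": "scopes"}],
                }
            },
        }
    ]
}

def collect_field_names(schema: dict[str, Any], mode: str, section: str, bucket: str) -> set[str]:
    """Standalone equivalent of the traversal performed by _get_schema_field_names()."""
    names: set[str] = set()
    for item in schema.get("auth_config_details", []):
        if item.get("mode") != mode:
            continue
        entries = (item.get("fields", {}).get(section, {}) or {}).get(bucket, []) or []
        names.update(entry["name"] for entry in entries if isinstance(entry, dict) and "name" in entry)
    return names

print(collect_field_names(example_schema, "OAUTH2", "auth_config_creation", "required"))
# -> {'client_id', 'client_secret'} (set order may vary)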