lfx-nightly 0.1.11.dev0 (lfx_nightly-0.1.11.dev0-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lfx/__init__.py +0 -0
- lfx/__main__.py +25 -0
- lfx/base/__init__.py +0 -0
- lfx/base/agents/__init__.py +0 -0
- lfx/base/agents/agent.py +268 -0
- lfx/base/agents/callback.py +130 -0
- lfx/base/agents/context.py +109 -0
- lfx/base/agents/crewai/__init__.py +0 -0
- lfx/base/agents/crewai/crew.py +231 -0
- lfx/base/agents/crewai/tasks.py +12 -0
- lfx/base/agents/default_prompts.py +23 -0
- lfx/base/agents/errors.py +15 -0
- lfx/base/agents/events.py +346 -0
- lfx/base/agents/utils.py +205 -0
- lfx/base/astra_assistants/__init__.py +0 -0
- lfx/base/astra_assistants/util.py +171 -0
- lfx/base/chains/__init__.py +0 -0
- lfx/base/chains/model.py +19 -0
- lfx/base/composio/__init__.py +0 -0
- lfx/base/composio/composio_base.py +1291 -0
- lfx/base/compressors/__init__.py +0 -0
- lfx/base/compressors/model.py +60 -0
- lfx/base/constants.py +46 -0
- lfx/base/curl/__init__.py +0 -0
- lfx/base/curl/parse.py +188 -0
- lfx/base/data/__init__.py +5 -0
- lfx/base/data/base_file.py +685 -0
- lfx/base/data/docling_utils.py +245 -0
- lfx/base/data/utils.py +198 -0
- lfx/base/document_transformers/__init__.py +0 -0
- lfx/base/document_transformers/model.py +43 -0
- lfx/base/embeddings/__init__.py +0 -0
- lfx/base/embeddings/aiml_embeddings.py +62 -0
- lfx/base/embeddings/model.py +26 -0
- lfx/base/flow_processing/__init__.py +0 -0
- lfx/base/flow_processing/utils.py +86 -0
- lfx/base/huggingface/__init__.py +0 -0
- lfx/base/huggingface/model_bridge.py +133 -0
- lfx/base/io/__init__.py +0 -0
- lfx/base/io/chat.py +20 -0
- lfx/base/io/text.py +22 -0
- lfx/base/langchain_utilities/__init__.py +0 -0
- lfx/base/langchain_utilities/model.py +35 -0
- lfx/base/langchain_utilities/spider_constants.py +1 -0
- lfx/base/langwatch/__init__.py +0 -0
- lfx/base/langwatch/utils.py +18 -0
- lfx/base/mcp/__init__.py +0 -0
- lfx/base/mcp/constants.py +2 -0
- lfx/base/mcp/util.py +1398 -0
- lfx/base/memory/__init__.py +0 -0
- lfx/base/memory/memory.py +49 -0
- lfx/base/memory/model.py +38 -0
- lfx/base/models/__init__.py +3 -0
- lfx/base/models/aiml_constants.py +51 -0
- lfx/base/models/anthropic_constants.py +47 -0
- lfx/base/models/aws_constants.py +151 -0
- lfx/base/models/chat_result.py +76 -0
- lfx/base/models/google_generative_ai_constants.py +70 -0
- lfx/base/models/groq_constants.py +134 -0
- lfx/base/models/model.py +375 -0
- lfx/base/models/model_input_constants.py +307 -0
- lfx/base/models/model_metadata.py +41 -0
- lfx/base/models/model_utils.py +8 -0
- lfx/base/models/novita_constants.py +35 -0
- lfx/base/models/ollama_constants.py +49 -0
- lfx/base/models/openai_constants.py +122 -0
- lfx/base/models/sambanova_constants.py +18 -0
- lfx/base/processing/__init__.py +0 -0
- lfx/base/prompts/__init__.py +0 -0
- lfx/base/prompts/api_utils.py +224 -0
- lfx/base/prompts/utils.py +61 -0
- lfx/base/textsplitters/__init__.py +0 -0
- lfx/base/textsplitters/model.py +28 -0
- lfx/base/tools/__init__.py +0 -0
- lfx/base/tools/base.py +26 -0
- lfx/base/tools/component_tool.py +325 -0
- lfx/base/tools/constants.py +49 -0
- lfx/base/tools/flow_tool.py +132 -0
- lfx/base/tools/run_flow.py +224 -0
- lfx/base/vectorstores/__init__.py +0 -0
- lfx/base/vectorstores/model.py +193 -0
- lfx/base/vectorstores/utils.py +22 -0
- lfx/base/vectorstores/vector_store_connection_decorator.py +52 -0
- lfx/cli/__init__.py +5 -0
- lfx/cli/commands.py +319 -0
- lfx/cli/common.py +650 -0
- lfx/cli/run.py +441 -0
- lfx/cli/script_loader.py +247 -0
- lfx/cli/serve_app.py +546 -0
- lfx/cli/validation.py +69 -0
- lfx/components/FAISS/__init__.py +34 -0
- lfx/components/FAISS/faiss.py +111 -0
- lfx/components/Notion/__init__.py +19 -0
- lfx/components/Notion/add_content_to_page.py +269 -0
- lfx/components/Notion/create_page.py +94 -0
- lfx/components/Notion/list_database_properties.py +68 -0
- lfx/components/Notion/list_pages.py +122 -0
- lfx/components/Notion/list_users.py +77 -0
- lfx/components/Notion/page_content_viewer.py +93 -0
- lfx/components/Notion/search.py +111 -0
- lfx/components/Notion/update_page_property.py +114 -0
- lfx/components/__init__.py +411 -0
- lfx/components/_importing.py +42 -0
- lfx/components/agentql/__init__.py +3 -0
- lfx/components/agentql/agentql_api.py +151 -0
- lfx/components/agents/__init__.py +34 -0
- lfx/components/agents/agent.py +558 -0
- lfx/components/agents/mcp_component.py +501 -0
- lfx/components/aiml/__init__.py +37 -0
- lfx/components/aiml/aiml.py +112 -0
- lfx/components/aiml/aiml_embeddings.py +37 -0
- lfx/components/amazon/__init__.py +36 -0
- lfx/components/amazon/amazon_bedrock_embedding.py +109 -0
- lfx/components/amazon/amazon_bedrock_model.py +124 -0
- lfx/components/amazon/s3_bucket_uploader.py +211 -0
- lfx/components/anthropic/__init__.py +34 -0
- lfx/components/anthropic/anthropic.py +187 -0
- lfx/components/apify/__init__.py +5 -0
- lfx/components/apify/apify_actor.py +325 -0
- lfx/components/arxiv/__init__.py +3 -0
- lfx/components/arxiv/arxiv.py +163 -0
- lfx/components/assemblyai/__init__.py +46 -0
- lfx/components/assemblyai/assemblyai_get_subtitles.py +83 -0
- lfx/components/assemblyai/assemblyai_lemur.py +183 -0
- lfx/components/assemblyai/assemblyai_list_transcripts.py +95 -0
- lfx/components/assemblyai/assemblyai_poll_transcript.py +72 -0
- lfx/components/assemblyai/assemblyai_start_transcript.py +188 -0
- lfx/components/azure/__init__.py +37 -0
- lfx/components/azure/azure_openai.py +95 -0
- lfx/components/azure/azure_openai_embeddings.py +83 -0
- lfx/components/baidu/__init__.py +32 -0
- lfx/components/baidu/baidu_qianfan_chat.py +113 -0
- lfx/components/bing/__init__.py +3 -0
- lfx/components/bing/bing_search_api.py +61 -0
- lfx/components/cassandra/__init__.py +40 -0
- lfx/components/cassandra/cassandra.py +264 -0
- lfx/components/cassandra/cassandra_chat.py +92 -0
- lfx/components/cassandra/cassandra_graph.py +238 -0
- lfx/components/chains/__init__.py +3 -0
- lfx/components/chroma/__init__.py +34 -0
- lfx/components/chroma/chroma.py +167 -0
- lfx/components/cleanlab/__init__.py +40 -0
- lfx/components/cleanlab/cleanlab_evaluator.py +155 -0
- lfx/components/cleanlab/cleanlab_rag_evaluator.py +254 -0
- lfx/components/cleanlab/cleanlab_remediator.py +131 -0
- lfx/components/clickhouse/__init__.py +34 -0
- lfx/components/clickhouse/clickhouse.py +135 -0
- lfx/components/cloudflare/__init__.py +32 -0
- lfx/components/cloudflare/cloudflare.py +81 -0
- lfx/components/cohere/__init__.py +40 -0
- lfx/components/cohere/cohere_embeddings.py +81 -0
- lfx/components/cohere/cohere_models.py +46 -0
- lfx/components/cohere/cohere_rerank.py +51 -0
- lfx/components/composio/__init__.py +74 -0
- lfx/components/composio/composio_api.py +268 -0
- lfx/components/composio/dropbox_compnent.py +11 -0
- lfx/components/composio/github_composio.py +11 -0
- lfx/components/composio/gmail_composio.py +38 -0
- lfx/components/composio/googlecalendar_composio.py +11 -0
- lfx/components/composio/googlemeet_composio.py +11 -0
- lfx/components/composio/googletasks_composio.py +8 -0
- lfx/components/composio/linear_composio.py +11 -0
- lfx/components/composio/outlook_composio.py +11 -0
- lfx/components/composio/reddit_composio.py +11 -0
- lfx/components/composio/slack_composio.py +582 -0
- lfx/components/composio/slackbot_composio.py +11 -0
- lfx/components/composio/supabase_composio.py +11 -0
- lfx/components/composio/todoist_composio.py +11 -0
- lfx/components/composio/youtube_composio.py +11 -0
- lfx/components/confluence/__init__.py +3 -0
- lfx/components/confluence/confluence.py +84 -0
- lfx/components/couchbase/__init__.py +34 -0
- lfx/components/couchbase/couchbase.py +102 -0
- lfx/components/crewai/__init__.py +49 -0
- lfx/components/crewai/crewai.py +107 -0
- lfx/components/crewai/hierarchical_crew.py +46 -0
- lfx/components/crewai/hierarchical_task.py +44 -0
- lfx/components/crewai/sequential_crew.py +52 -0
- lfx/components/crewai/sequential_task.py +73 -0
- lfx/components/crewai/sequential_task_agent.py +143 -0
- lfx/components/custom_component/__init__.py +34 -0
- lfx/components/custom_component/custom_component.py +31 -0
- lfx/components/data/__init__.py +64 -0
- lfx/components/data/api_request.py +544 -0
- lfx/components/data/csv_to_data.py +95 -0
- lfx/components/data/directory.py +113 -0
- lfx/components/data/file.py +577 -0
- lfx/components/data/json_to_data.py +98 -0
- lfx/components/data/news_search.py +164 -0
- lfx/components/data/rss.py +69 -0
- lfx/components/data/sql_executor.py +101 -0
- lfx/components/data/url.py +311 -0
- lfx/components/data/web_search.py +112 -0
- lfx/components/data/webhook.py +56 -0
- lfx/components/datastax/__init__.py +70 -0
- lfx/components/datastax/astra_assistant_manager.py +306 -0
- lfx/components/datastax/astra_db.py +75 -0
- lfx/components/datastax/astra_vectorize.py +124 -0
- lfx/components/datastax/astradb.py +1285 -0
- lfx/components/datastax/astradb_cql.py +314 -0
- lfx/components/datastax/astradb_graph.py +330 -0
- lfx/components/datastax/astradb_tool.py +414 -0
- lfx/components/datastax/astradb_vectorstore.py +1285 -0
- lfx/components/datastax/cassandra.py +92 -0
- lfx/components/datastax/create_assistant.py +58 -0
- lfx/components/datastax/create_thread.py +32 -0
- lfx/components/datastax/dotenv.py +35 -0
- lfx/components/datastax/get_assistant.py +37 -0
- lfx/components/datastax/getenvvar.py +30 -0
- lfx/components/datastax/graph_rag.py +141 -0
- lfx/components/datastax/hcd.py +314 -0
- lfx/components/datastax/list_assistants.py +25 -0
- lfx/components/datastax/run.py +89 -0
- lfx/components/deactivated/__init__.py +15 -0
- lfx/components/deactivated/amazon_kendra.py +66 -0
- lfx/components/deactivated/chat_litellm_model.py +158 -0
- lfx/components/deactivated/code_block_extractor.py +26 -0
- lfx/components/deactivated/documents_to_data.py +22 -0
- lfx/components/deactivated/embed.py +16 -0
- lfx/components/deactivated/extract_key_from_data.py +46 -0
- lfx/components/deactivated/json_document_builder.py +57 -0
- lfx/components/deactivated/list_flows.py +20 -0
- lfx/components/deactivated/mcp_sse.py +61 -0
- lfx/components/deactivated/mcp_stdio.py +62 -0
- lfx/components/deactivated/merge_data.py +93 -0
- lfx/components/deactivated/message.py +37 -0
- lfx/components/deactivated/metal.py +54 -0
- lfx/components/deactivated/multi_query.py +59 -0
- lfx/components/deactivated/retriever.py +43 -0
- lfx/components/deactivated/selective_passthrough.py +77 -0
- lfx/components/deactivated/should_run_next.py +40 -0
- lfx/components/deactivated/split_text.py +63 -0
- lfx/components/deactivated/store_message.py +24 -0
- lfx/components/deactivated/sub_flow.py +124 -0
- lfx/components/deactivated/vectara_self_query.py +76 -0
- lfx/components/deactivated/vector_store.py +24 -0
- lfx/components/deepseek/__init__.py +34 -0
- lfx/components/deepseek/deepseek.py +136 -0
- lfx/components/docling/__init__.py +43 -0
- lfx/components/docling/chunk_docling_document.py +186 -0
- lfx/components/docling/docling_inline.py +231 -0
- lfx/components/docling/docling_remote.py +193 -0
- lfx/components/docling/export_docling_document.py +117 -0
- lfx/components/documentloaders/__init__.py +3 -0
- lfx/components/duckduckgo/__init__.py +3 -0
- lfx/components/duckduckgo/duck_duck_go_search_run.py +92 -0
- lfx/components/elastic/__init__.py +37 -0
- lfx/components/elastic/elasticsearch.py +267 -0
- lfx/components/elastic/opensearch.py +243 -0
- lfx/components/embeddings/__init__.py +37 -0
- lfx/components/embeddings/similarity.py +76 -0
- lfx/components/embeddings/text_embedder.py +64 -0
- lfx/components/exa/__init__.py +3 -0
- lfx/components/exa/exa_search.py +68 -0
- lfx/components/firecrawl/__init__.py +43 -0
- lfx/components/firecrawl/firecrawl_crawl_api.py +88 -0
- lfx/components/firecrawl/firecrawl_extract_api.py +136 -0
- lfx/components/firecrawl/firecrawl_map_api.py +89 -0
- lfx/components/firecrawl/firecrawl_scrape_api.py +73 -0
- lfx/components/git/__init__.py +4 -0
- lfx/components/git/git.py +262 -0
- lfx/components/git/gitextractor.py +196 -0
- lfx/components/glean/__init__.py +3 -0
- lfx/components/glean/glean_search_api.py +173 -0
- lfx/components/google/__init__.py +17 -0
- lfx/components/google/gmail.py +192 -0
- lfx/components/google/google_bq_sql_executor.py +157 -0
- lfx/components/google/google_drive.py +92 -0
- lfx/components/google/google_drive_search.py +152 -0
- lfx/components/google/google_generative_ai.py +147 -0
- lfx/components/google/google_generative_ai_embeddings.py +141 -0
- lfx/components/google/google_oauth_token.py +89 -0
- lfx/components/google/google_search_api_core.py +68 -0
- lfx/components/google/google_serper_api_core.py +74 -0
- lfx/components/groq/__init__.py +34 -0
- lfx/components/groq/groq.py +136 -0
- lfx/components/helpers/__init__.py +52 -0
- lfx/components/helpers/calculator_core.py +89 -0
- lfx/components/helpers/create_list.py +40 -0
- lfx/components/helpers/current_date.py +42 -0
- lfx/components/helpers/id_generator.py +42 -0
- lfx/components/helpers/memory.py +251 -0
- lfx/components/helpers/output_parser.py +45 -0
- lfx/components/helpers/store_message.py +90 -0
- lfx/components/homeassistant/__init__.py +7 -0
- lfx/components/homeassistant/home_assistant_control.py +152 -0
- lfx/components/homeassistant/list_home_assistant_states.py +137 -0
- lfx/components/huggingface/__init__.py +37 -0
- lfx/components/huggingface/huggingface.py +197 -0
- lfx/components/huggingface/huggingface_inference_api.py +106 -0
- lfx/components/ibm/__init__.py +34 -0
- lfx/components/ibm/watsonx.py +203 -0
- lfx/components/ibm/watsonx_embeddings.py +135 -0
- lfx/components/icosacomputing/__init__.py +5 -0
- lfx/components/icosacomputing/combinatorial_reasoner.py +84 -0
- lfx/components/input_output/__init__.py +38 -0
- lfx/components/input_output/chat.py +120 -0
- lfx/components/input_output/chat_output.py +200 -0
- lfx/components/input_output/text.py +27 -0
- lfx/components/input_output/text_output.py +29 -0
- lfx/components/jigsawstack/__init__.py +23 -0
- lfx/components/jigsawstack/ai_scrape.py +126 -0
- lfx/components/jigsawstack/ai_web_search.py +136 -0
- lfx/components/jigsawstack/file_read.py +115 -0
- lfx/components/jigsawstack/file_upload.py +94 -0
- lfx/components/jigsawstack/image_generation.py +205 -0
- lfx/components/jigsawstack/nsfw.py +60 -0
- lfx/components/jigsawstack/object_detection.py +124 -0
- lfx/components/jigsawstack/sentiment.py +112 -0
- lfx/components/jigsawstack/text_to_sql.py +90 -0
- lfx/components/jigsawstack/text_translate.py +77 -0
- lfx/components/jigsawstack/vocr.py +107 -0
- lfx/components/langchain_utilities/__init__.py +109 -0
- lfx/components/langchain_utilities/character.py +53 -0
- lfx/components/langchain_utilities/conversation.py +59 -0
- lfx/components/langchain_utilities/csv_agent.py +107 -0
- lfx/components/langchain_utilities/fake_embeddings.py +26 -0
- lfx/components/langchain_utilities/html_link_extractor.py +35 -0
- lfx/components/langchain_utilities/json_agent.py +45 -0
- lfx/components/langchain_utilities/langchain_hub.py +126 -0
- lfx/components/langchain_utilities/language_recursive.py +49 -0
- lfx/components/langchain_utilities/language_semantic.py +138 -0
- lfx/components/langchain_utilities/llm_checker.py +39 -0
- lfx/components/langchain_utilities/llm_math.py +42 -0
- lfx/components/langchain_utilities/natural_language.py +61 -0
- lfx/components/langchain_utilities/openai_tools.py +53 -0
- lfx/components/langchain_utilities/openapi.py +48 -0
- lfx/components/langchain_utilities/recursive_character.py +60 -0
- lfx/components/langchain_utilities/retrieval_qa.py +83 -0
- lfx/components/langchain_utilities/runnable_executor.py +137 -0
- lfx/components/langchain_utilities/self_query.py +80 -0
- lfx/components/langchain_utilities/spider.py +142 -0
- lfx/components/langchain_utilities/sql.py +40 -0
- lfx/components/langchain_utilities/sql_database.py +35 -0
- lfx/components/langchain_utilities/sql_generator.py +78 -0
- lfx/components/langchain_utilities/tool_calling.py +59 -0
- lfx/components/langchain_utilities/vector_store_info.py +49 -0
- lfx/components/langchain_utilities/vector_store_router.py +33 -0
- lfx/components/langchain_utilities/xml_agent.py +71 -0
- lfx/components/langwatch/__init__.py +3 -0
- lfx/components/langwatch/langwatch.py +278 -0
- lfx/components/link_extractors/__init__.py +3 -0
- lfx/components/lmstudio/__init__.py +34 -0
- lfx/components/lmstudio/lmstudioembeddings.py +89 -0
- lfx/components/lmstudio/lmstudiomodel.py +129 -0
- lfx/components/logic/__init__.py +52 -0
- lfx/components/logic/conditional_router.py +171 -0
- lfx/components/logic/data_conditional_router.py +125 -0
- lfx/components/logic/flow_tool.py +110 -0
- lfx/components/logic/listen.py +29 -0
- lfx/components/logic/loop.py +125 -0
- lfx/components/logic/notify.py +88 -0
- lfx/components/logic/pass_message.py +35 -0
- lfx/components/logic/run_flow.py +71 -0
- lfx/components/logic/sub_flow.py +114 -0
- lfx/components/maritalk/__init__.py +32 -0
- lfx/components/maritalk/maritalk.py +52 -0
- lfx/components/mem0/__init__.py +3 -0
- lfx/components/mem0/mem0_chat_memory.py +136 -0
- lfx/components/milvus/__init__.py +34 -0
- lfx/components/milvus/milvus.py +115 -0
- lfx/components/mistral/__init__.py +37 -0
- lfx/components/mistral/mistral.py +114 -0
- lfx/components/mistral/mistral_embeddings.py +58 -0
- lfx/components/models/__init__.py +34 -0
- lfx/components/models/embedding_model.py +114 -0
- lfx/components/models/language_model.py +144 -0
- lfx/components/mongodb/__init__.py +34 -0
- lfx/components/mongodb/mongodb_atlas.py +213 -0
- lfx/components/needle/__init__.py +3 -0
- lfx/components/needle/needle.py +104 -0
- lfx/components/notdiamond/__init__.py +34 -0
- lfx/components/notdiamond/notdiamond.py +228 -0
- lfx/components/novita/__init__.py +32 -0
- lfx/components/novita/novita.py +130 -0
- lfx/components/nvidia/__init__.py +57 -0
- lfx/components/nvidia/nvidia.py +157 -0
- lfx/components/nvidia/nvidia_embedding.py +77 -0
- lfx/components/nvidia/nvidia_ingest.py +317 -0
- lfx/components/nvidia/nvidia_rerank.py +63 -0
- lfx/components/nvidia/system_assist.py +65 -0
- lfx/components/olivya/__init__.py +3 -0
- lfx/components/olivya/olivya.py +116 -0
- lfx/components/ollama/__init__.py +37 -0
- lfx/components/ollama/ollama.py +330 -0
- lfx/components/ollama/ollama_embeddings.py +106 -0
- lfx/components/openai/__init__.py +37 -0
- lfx/components/openai/openai.py +100 -0
- lfx/components/openai/openai_chat_model.py +176 -0
- lfx/components/openrouter/__init__.py +32 -0
- lfx/components/openrouter/openrouter.py +202 -0
- lfx/components/output_parsers/__init__.py +3 -0
- lfx/components/perplexity/__init__.py +34 -0
- lfx/components/perplexity/perplexity.py +75 -0
- lfx/components/pgvector/__init__.py +34 -0
- lfx/components/pgvector/pgvector.py +72 -0
- lfx/components/pinecone/__init__.py +34 -0
- lfx/components/pinecone/pinecone.py +134 -0
- lfx/components/processing/__init__.py +117 -0
- lfx/components/processing/alter_metadata.py +108 -0
- lfx/components/processing/batch_run.py +205 -0
- lfx/components/processing/combine_text.py +39 -0
- lfx/components/processing/converter.py +159 -0
- lfx/components/processing/create_data.py +110 -0
- lfx/components/processing/data_operations.py +438 -0
- lfx/components/processing/data_to_dataframe.py +70 -0
- lfx/components/processing/dataframe_operations.py +313 -0
- lfx/components/processing/extract_key.py +53 -0
- lfx/components/processing/filter_data.py +42 -0
- lfx/components/processing/filter_data_values.py +88 -0
- lfx/components/processing/json_cleaner.py +103 -0
- lfx/components/processing/lambda_filter.py +154 -0
- lfx/components/processing/llm_router.py +499 -0
- lfx/components/processing/merge_data.py +90 -0
- lfx/components/processing/message_to_data.py +36 -0
- lfx/components/processing/parse_data.py +70 -0
- lfx/components/processing/parse_dataframe.py +68 -0
- lfx/components/processing/parse_json_data.py +90 -0
- lfx/components/processing/parser.py +143 -0
- lfx/components/processing/prompt.py +67 -0
- lfx/components/processing/python_repl_core.py +98 -0
- lfx/components/processing/regex.py +82 -0
- lfx/components/processing/save_file.py +225 -0
- lfx/components/processing/select_data.py +48 -0
- lfx/components/processing/split_text.py +141 -0
- lfx/components/processing/structured_output.py +202 -0
- lfx/components/processing/update_data.py +160 -0
- lfx/components/prototypes/__init__.py +34 -0
- lfx/components/prototypes/python_function.py +73 -0
- lfx/components/qdrant/__init__.py +34 -0
- lfx/components/qdrant/qdrant.py +109 -0
- lfx/components/redis/__init__.py +37 -0
- lfx/components/redis/redis.py +89 -0
- lfx/components/redis/redis_chat.py +43 -0
- lfx/components/sambanova/__init__.py +32 -0
- lfx/components/sambanova/sambanova.py +84 -0
- lfx/components/scrapegraph/__init__.py +40 -0
- lfx/components/scrapegraph/scrapegraph_markdownify_api.py +64 -0
- lfx/components/scrapegraph/scrapegraph_search_api.py +64 -0
- lfx/components/scrapegraph/scrapegraph_smart_scraper_api.py +71 -0
- lfx/components/searchapi/__init__.py +34 -0
- lfx/components/searchapi/search.py +79 -0
- lfx/components/serpapi/__init__.py +3 -0
- lfx/components/serpapi/serp.py +115 -0
- lfx/components/supabase/__init__.py +34 -0
- lfx/components/supabase/supabase.py +76 -0
- lfx/components/tavily/__init__.py +4 -0
- lfx/components/tavily/tavily_extract.py +117 -0
- lfx/components/tavily/tavily_search.py +212 -0
- lfx/components/textsplitters/__init__.py +3 -0
- lfx/components/toolkits/__init__.py +3 -0
- lfx/components/tools/__init__.py +72 -0
- lfx/components/tools/calculator.py +108 -0
- lfx/components/tools/google_search_api.py +45 -0
- lfx/components/tools/google_serper_api.py +115 -0
- lfx/components/tools/python_code_structured_tool.py +327 -0
- lfx/components/tools/python_repl.py +97 -0
- lfx/components/tools/search_api.py +87 -0
- lfx/components/tools/searxng.py +145 -0
- lfx/components/tools/serp_api.py +119 -0
- lfx/components/tools/tavily_search_tool.py +344 -0
- lfx/components/tools/wikidata_api.py +102 -0
- lfx/components/tools/wikipedia_api.py +49 -0
- lfx/components/tools/yahoo_finance.py +129 -0
- lfx/components/twelvelabs/__init__.py +52 -0
- lfx/components/twelvelabs/convert_astra_results.py +84 -0
- lfx/components/twelvelabs/pegasus_index.py +311 -0
- lfx/components/twelvelabs/split_video.py +291 -0
- lfx/components/twelvelabs/text_embeddings.py +57 -0
- lfx/components/twelvelabs/twelvelabs_pegasus.py +408 -0
- lfx/components/twelvelabs/video_embeddings.py +100 -0
- lfx/components/twelvelabs/video_file.py +179 -0
- lfx/components/unstructured/__init__.py +3 -0
- lfx/components/unstructured/unstructured.py +121 -0
- lfx/components/upstash/__init__.py +34 -0
- lfx/components/upstash/upstash.py +124 -0
- lfx/components/vectara/__init__.py +37 -0
- lfx/components/vectara/vectara.py +97 -0
- lfx/components/vectara/vectara_rag.py +164 -0
- lfx/components/vectorstores/__init__.py +40 -0
- lfx/components/vectorstores/astradb.py +1285 -0
- lfx/components/vectorstores/astradb_graph.py +319 -0
- lfx/components/vectorstores/cassandra.py +264 -0
- lfx/components/vectorstores/cassandra_graph.py +238 -0
- lfx/components/vectorstores/chroma.py +167 -0
- lfx/components/vectorstores/clickhouse.py +135 -0
- lfx/components/vectorstores/couchbase.py +102 -0
- lfx/components/vectorstores/elasticsearch.py +267 -0
- lfx/components/vectorstores/faiss.py +111 -0
- lfx/components/vectorstores/graph_rag.py +141 -0
- lfx/components/vectorstores/hcd.py +314 -0
- lfx/components/vectorstores/local_db.py +261 -0
- lfx/components/vectorstores/milvus.py +115 -0
- lfx/components/vectorstores/mongodb_atlas.py +213 -0
- lfx/components/vectorstores/opensearch.py +243 -0
- lfx/components/vectorstores/pgvector.py +72 -0
- lfx/components/vectorstores/pinecone.py +134 -0
- lfx/components/vectorstores/qdrant.py +109 -0
- lfx/components/vectorstores/supabase.py +76 -0
- lfx/components/vectorstores/upstash.py +124 -0
- lfx/components/vectorstores/vectara.py +97 -0
- lfx/components/vectorstores/vectara_rag.py +164 -0
- lfx/components/vectorstores/weaviate.py +89 -0
- lfx/components/vertexai/__init__.py +37 -0
- lfx/components/vertexai/vertexai.py +71 -0
- lfx/components/vertexai/vertexai_embeddings.py +67 -0
- lfx/components/weaviate/__init__.py +34 -0
- lfx/components/weaviate/weaviate.py +89 -0
- lfx/components/wikipedia/__init__.py +4 -0
- lfx/components/wikipedia/wikidata.py +86 -0
- lfx/components/wikipedia/wikipedia.py +53 -0
- lfx/components/wolframalpha/__init__.py +3 -0
- lfx/components/wolframalpha/wolfram_alpha_api.py +54 -0
- lfx/components/xai/__init__.py +32 -0
- lfx/components/xai/xai.py +167 -0
- lfx/components/yahoosearch/__init__.py +3 -0
- lfx/components/yahoosearch/yahoo.py +137 -0
- lfx/components/youtube/__init__.py +52 -0
- lfx/components/youtube/channel.py +227 -0
- lfx/components/youtube/comments.py +231 -0
- lfx/components/youtube/playlist.py +33 -0
- lfx/components/youtube/search.py +120 -0
- lfx/components/youtube/trending.py +285 -0
- lfx/components/youtube/video_details.py +263 -0
- lfx/components/youtube/youtube_transcripts.py +118 -0
- lfx/components/zep/__init__.py +3 -0
- lfx/components/zep/zep.py +44 -0
- lfx/constants.py +6 -0
- lfx/custom/__init__.py +7 -0
- lfx/custom/attributes.py +86 -0
- lfx/custom/code_parser/__init__.py +3 -0
- lfx/custom/code_parser/code_parser.py +361 -0
- lfx/custom/custom_component/__init__.py +0 -0
- lfx/custom/custom_component/base_component.py +128 -0
- lfx/custom/custom_component/component.py +1808 -0
- lfx/custom/custom_component/component_with_cache.py +8 -0
- lfx/custom/custom_component/custom_component.py +588 -0
- lfx/custom/dependency_analyzer.py +165 -0
- lfx/custom/directory_reader/__init__.py +3 -0
- lfx/custom/directory_reader/directory_reader.py +359 -0
- lfx/custom/directory_reader/utils.py +171 -0
- lfx/custom/eval.py +12 -0
- lfx/custom/schema.py +32 -0
- lfx/custom/tree_visitor.py +21 -0
- lfx/custom/utils.py +877 -0
- lfx/custom/validate.py +488 -0
- lfx/events/__init__.py +1 -0
- lfx/events/event_manager.py +110 -0
- lfx/exceptions/__init__.py +0 -0
- lfx/exceptions/component.py +15 -0
- lfx/field_typing/__init__.py +91 -0
- lfx/field_typing/constants.py +215 -0
- lfx/field_typing/range_spec.py +35 -0
- lfx/graph/__init__.py +6 -0
- lfx/graph/edge/__init__.py +0 -0
- lfx/graph/edge/base.py +277 -0
- lfx/graph/edge/schema.py +119 -0
- lfx/graph/edge/utils.py +0 -0
- lfx/graph/graph/__init__.py +0 -0
- lfx/graph/graph/ascii.py +202 -0
- lfx/graph/graph/base.py +2238 -0
- lfx/graph/graph/constants.py +63 -0
- lfx/graph/graph/runnable_vertices_manager.py +133 -0
- lfx/graph/graph/schema.py +52 -0
- lfx/graph/graph/state_model.py +66 -0
- lfx/graph/graph/utils.py +1024 -0
- lfx/graph/schema.py +75 -0
- lfx/graph/state/__init__.py +0 -0
- lfx/graph/state/model.py +237 -0
- lfx/graph/utils.py +200 -0
- lfx/graph/vertex/__init__.py +0 -0
- lfx/graph/vertex/base.py +823 -0
- lfx/graph/vertex/constants.py +0 -0
- lfx/graph/vertex/exceptions.py +4 -0
- lfx/graph/vertex/param_handler.py +264 -0
- lfx/graph/vertex/schema.py +26 -0
- lfx/graph/vertex/utils.py +19 -0
- lfx/graph/vertex/vertex_types.py +489 -0
- lfx/helpers/__init__.py +1 -0
- lfx/helpers/base_model.py +71 -0
- lfx/helpers/custom.py +13 -0
- lfx/helpers/data.py +167 -0
- lfx/helpers/flow.py +194 -0
- lfx/inputs/__init__.py +68 -0
- lfx/inputs/constants.py +2 -0
- lfx/inputs/input_mixin.py +328 -0
- lfx/inputs/inputs.py +714 -0
- lfx/inputs/validators.py +19 -0
- lfx/interface/__init__.py +6 -0
- lfx/interface/components.py +489 -0
- lfx/interface/importing/__init__.py +5 -0
- lfx/interface/importing/utils.py +39 -0
- lfx/interface/initialize/__init__.py +3 -0
- lfx/interface/initialize/loading.py +224 -0
- lfx/interface/listing.py +26 -0
- lfx/interface/run.py +16 -0
- lfx/interface/utils.py +111 -0
- lfx/io/__init__.py +63 -0
- lfx/io/schema.py +289 -0
- lfx/load/__init__.py +8 -0
- lfx/load/load.py +256 -0
- lfx/load/utils.py +99 -0
- lfx/log/__init__.py +5 -0
- lfx/log/logger.py +385 -0
- lfx/memory/__init__.py +90 -0
- lfx/memory/stubs.py +283 -0
- lfx/processing/__init__.py +1 -0
- lfx/processing/process.py +238 -0
- lfx/processing/utils.py +25 -0
- lfx/py.typed +0 -0
- lfx/schema/__init__.py +66 -0
- lfx/schema/artifact.py +83 -0
- lfx/schema/content_block.py +62 -0
- lfx/schema/content_types.py +91 -0
- lfx/schema/data.py +308 -0
- lfx/schema/dataframe.py +210 -0
- lfx/schema/dotdict.py +74 -0
- lfx/schema/encoders.py +13 -0
- lfx/schema/graph.py +47 -0
- lfx/schema/image.py +131 -0
- lfx/schema/json_schema.py +141 -0
- lfx/schema/log.py +61 -0
- lfx/schema/message.py +473 -0
- lfx/schema/openai_responses_schemas.py +74 -0
- lfx/schema/properties.py +41 -0
- lfx/schema/schema.py +171 -0
- lfx/schema/serialize.py +13 -0
- lfx/schema/table.py +140 -0
- lfx/schema/validators.py +114 -0
- lfx/serialization/__init__.py +5 -0
- lfx/serialization/constants.py +2 -0
- lfx/serialization/serialization.py +314 -0
- lfx/services/__init__.py +23 -0
- lfx/services/base.py +28 -0
- lfx/services/cache/__init__.py +6 -0
- lfx/services/cache/base.py +183 -0
- lfx/services/cache/service.py +166 -0
- lfx/services/cache/utils.py +169 -0
- lfx/services/chat/__init__.py +1 -0
- lfx/services/chat/config.py +2 -0
- lfx/services/chat/schema.py +10 -0
- lfx/services/deps.py +129 -0
- lfx/services/factory.py +19 -0
- lfx/services/initialize.py +19 -0
- lfx/services/interfaces.py +103 -0
- lfx/services/manager.py +172 -0
- lfx/services/schema.py +20 -0
- lfx/services/session.py +82 -0
- lfx/services/settings/__init__.py +3 -0
- lfx/services/settings/auth.py +130 -0
- lfx/services/settings/base.py +539 -0
- lfx/services/settings/constants.py +31 -0
- lfx/services/settings/factory.py +23 -0
- lfx/services/settings/feature_flags.py +12 -0
- lfx/services/settings/service.py +35 -0
- lfx/services/settings/utils.py +40 -0
- lfx/services/shared_component_cache/__init__.py +1 -0
- lfx/services/shared_component_cache/factory.py +30 -0
- lfx/services/shared_component_cache/service.py +9 -0
- lfx/services/storage/__init__.py +5 -0
- lfx/services/storage/local.py +155 -0
- lfx/services/storage/service.py +54 -0
- lfx/services/tracing/__init__.py +1 -0
- lfx/services/tracing/service.py +21 -0
- lfx/settings.py +6 -0
- lfx/template/__init__.py +6 -0
- lfx/template/field/__init__.py +0 -0
- lfx/template/field/base.py +257 -0
- lfx/template/field/prompt.py +15 -0
- lfx/template/frontend_node/__init__.py +6 -0
- lfx/template/frontend_node/base.py +212 -0
- lfx/template/frontend_node/constants.py +65 -0
- lfx/template/frontend_node/custom_components.py +79 -0
- lfx/template/template/__init__.py +0 -0
- lfx/template/template/base.py +100 -0
- lfx/template/utils.py +217 -0
- lfx/type_extraction/__init__.py +19 -0
- lfx/type_extraction/type_extraction.py +75 -0
- lfx/type_extraction.py +80 -0
- lfx/utils/__init__.py +1 -0
- lfx/utils/async_helpers.py +42 -0
- lfx/utils/component_utils.py +154 -0
- lfx/utils/concurrency.py +60 -0
- lfx/utils/connection_string_parser.py +11 -0
- lfx/utils/constants.py +205 -0
- lfx/utils/data_structure.py +212 -0
- lfx/utils/exceptions.py +22 -0
- lfx/utils/helpers.py +28 -0
- lfx/utils/image.py +73 -0
- lfx/utils/lazy_load.py +15 -0
- lfx/utils/request_utils.py +18 -0
- lfx/utils/schemas.py +139 -0
- lfx/utils/util.py +481 -0
- lfx/utils/util_strings.py +56 -0
- lfx/utils/version.py +24 -0
- lfx_nightly-0.1.11.dev0.dist-info/METADATA +293 -0
- lfx_nightly-0.1.11.dev0.dist-info/RECORD +699 -0
- lfx_nightly-0.1.11.dev0.dist-info/WHEEL +4 -0
- lfx_nightly-0.1.11.dev0.dist-info/entry_points.txt +2 -0

lfx/components/langchain_utilities/character.py
@@ -0,0 +1,53 @@
+from typing import Any
+
+from langchain_text_splitters import CharacterTextSplitter, TextSplitter
+
+from lfx.base.textsplitters.model import LCTextSplitterComponent
+from lfx.inputs.inputs import DataInput, IntInput, MessageTextInput
+from lfx.utils.util import unescape_string
+
+
+class CharacterTextSplitterComponent(LCTextSplitterComponent):
+    display_name = "Character Text Splitter"
+    description = "Split text by number of characters."
+    documentation = "https://docs.langflow.org/components/text-splitters#charactertextsplitter"
+    name = "CharacterTextSplitter"
+    icon = "LangChain"
+
+    inputs = [
+        IntInput(
+            name="chunk_size",
+            display_name="Chunk Size",
+            info="The maximum length of each chunk.",
+            value=1000,
+        ),
+        IntInput(
+            name="chunk_overlap",
+            display_name="Chunk Overlap",
+            info="The amount of overlap between chunks.",
+            value=200,
+        ),
+        DataInput(
+            name="data_input",
+            display_name="Input",
+            info="The texts to split.",
+            input_types=["Document", "Data"],
+            required=True,
+        ),
+        MessageTextInput(
+            name="separator",
+            display_name="Separator",
+            info='The characters to split on.\nIf left empty defaults to "\\n\\n".',
+        ),
+    ]
+
+    def get_data_input(self) -> Any:
+        return self.data_input
+
+    def build_text_splitter(self) -> TextSplitter:
+        separator = unescape_string(self.separator) if self.separator else "\n\n"
+        return CharacterTextSplitter(
+            chunk_overlap=self.chunk_overlap,
+            chunk_size=self.chunk_size,
+            separator=separator,
+        )
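
For context, a minimal usage sketch (not part of the package) of the LangChain splitter this component builds, assuming `langchain-text-splitters` is installed and using the component's default values:

```python
# Illustrative only: the same CharacterTextSplitter the component returns.
from langchain_text_splitters import CharacterTextSplitter

splitter = CharacterTextSplitter(separator="\n\n", chunk_size=1000, chunk_overlap=200)
chunks = splitter.split_text("First paragraph.\n\nSecond paragraph.\n\nThird paragraph.")
print(chunks)  # list[str], one entry per chunk
```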

lfx/components/langchain_utilities/conversation.py
@@ -0,0 +1,59 @@
+from lfx.base.chains.model import LCChainComponent
+from lfx.inputs.inputs import HandleInput, MultilineInput
+from lfx.schema.message import Message
+
+
+class ConversationChainComponent(LCChainComponent):
+    display_name = "ConversationChain"
+    description = "Chain to have a conversation and load context from memory."
+    name = "ConversationChain"
+    legacy: bool = True
+    icon = "LangChain"
+
+    inputs = [
+        MultilineInput(
+            name="input_value",
+            display_name="Input",
+            info="The input value to pass to the chain.",
+            required=True,
+        ),
+        HandleInput(
+            name="llm",
+            display_name="Language Model",
+            input_types=["LanguageModel"],
+            required=True,
+        ),
+        HandleInput(
+            name="memory",
+            display_name="Memory",
+            input_types=["BaseChatMemory"],
+        ),
+    ]
+
+    def invoke_chain(self) -> Message:
+        try:
+            from langchain.chains import ConversationChain
+        except ImportError as e:
+            msg = (
+                "ConversationChain requires langchain to be installed. Please install it with "
+                "`uv pip install langchain`."
+            )
+            raise ImportError(msg) from e
+
+        if not self.memory:
+            chain = ConversationChain(llm=self.llm)
+        else:
+            chain = ConversationChain(llm=self.llm, memory=self.memory)
+
+        result = chain.invoke(
+            {"input": self.input_value},
+            config={"callbacks": self.get_langchain_callbacks()},
+        )
+        if isinstance(result, dict):
+            result = result.get(chain.output_key, "")
+
+        elif not isinstance(result, str):
+            result = result.get("response")
+        result = str(result)
+        self.status = result
+        return Message(text=result)
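
A rough sketch (not part of the package) of the legacy chain invoked above, assuming `langchain` and `langchain-openai` are installed and an OpenAI key is configured; `ChatOpenAI` stands in for whatever LanguageModel is wired into the component:

```python
# Illustrative only: conversation with buffered memory outside of Langflow.
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory
from langchain_openai import ChatOpenAI

llm = ChatOpenAI(model="gpt-4o-mini")
chain = ConversationChain(llm=llm, memory=ConversationBufferMemory())
result = chain.invoke({"input": "Hi, my name is Ada."})
print(result[chain.output_key])  # the model's reply as a string
```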

lfx/components/langchain_utilities/csv_agent.py
@@ -0,0 +1,107 @@
+from langchain_experimental.agents.agent_toolkits.csv.base import create_csv_agent
+
+from lfx.base.agents.agent import LCAgentComponent
+from lfx.field_typing import AgentExecutor
+from lfx.inputs.inputs import (
+    DictInput,
+    DropdownInput,
+    FileInput,
+    HandleInput,
+    MessageTextInput,
+)
+from lfx.schema.message import Message
+from lfx.template.field.base import Output
+
+
+class CSVAgentComponent(LCAgentComponent):
+    display_name = "CSV Agent"
+    description = "Construct a CSV agent from a CSV and tools."
+    documentation = "https://python.langchain.com/docs/modules/agents/toolkits/csv"
+    name = "CSVAgent"
+    icon = "LangChain"
+
+    inputs = [
+        *LCAgentComponent.get_base_inputs(),
+        HandleInput(
+            name="llm",
+            display_name="Language Model",
+            input_types=["LanguageModel"],
+            required=True,
+            info="An LLM Model Object (It can be found in any LLM Component).",
+        ),
+        FileInput(
+            name="path",
+            display_name="File Path",
+            file_types=["csv"],
+            input_types=["str", "Message"],
+            required=True,
+            info="A CSV File or File Path.",
+        ),
+        DropdownInput(
+            name="agent_type",
+            display_name="Agent Type",
+            advanced=True,
+            options=["zero-shot-react-description", "openai-functions", "openai-tools"],
+            value="openai-tools",
+        ),
+        MessageTextInput(
+            name="input_value",
+            display_name="Text",
+            info="Text to be passed as input and extract info from the CSV File.",
+            required=True,
+        ),
+        DictInput(
+            name="pandas_kwargs",
+            display_name="Pandas Kwargs",
+            info="Pandas Kwargs to be passed to the agent.",
+            advanced=True,
+            is_list=True,
+        ),
+    ]
+
+    outputs = [
+        Output(display_name="Response", name="response", method="build_agent_response"),
+        Output(display_name="Agent", name="agent", method="build_agent", hidden=True, tool_mode=False),
+    ]
+
+    def _path(self) -> str:
+        if isinstance(self.path, Message) and isinstance(self.path.text, str):
+            return self.path.text
+        return self.path
+
+    def build_agent_response(self) -> Message:
+        agent_kwargs = {
+            "verbose": self.verbose,
+            "allow_dangerous_code": True,
+        }
+
+        agent_csv = create_csv_agent(
+            llm=self.llm,
+            path=self._path(),
+            agent_type=self.agent_type,
+            handle_parsing_errors=self.handle_parsing_errors,
+            pandas_kwargs=self.pandas_kwargs,
+            **agent_kwargs,
+        )
+
+        result = agent_csv.invoke({"input": self.input_value})
+        return Message(text=str(result["output"]))
+
+    def build_agent(self) -> AgentExecutor:
+        agent_kwargs = {
+            "verbose": self.verbose,
+            "allow_dangerous_code": True,
+        }
+
+        agent_csv = create_csv_agent(
+            llm=self.llm,
+            path=self._path(),
+            agent_type=self.agent_type,
+            handle_parsing_errors=self.handle_parsing_errors,
+            pandas_kwargs=self.pandas_kwargs,
+            **agent_kwargs,
+        )
+
+        self.status = Message(text=str(agent_csv))
+
+        return agent_csv

lfx/components/langchain_utilities/fake_embeddings.py
@@ -0,0 +1,26 @@
+from langchain_community.embeddings import FakeEmbeddings
+
+from lfx.base.embeddings.model import LCEmbeddingsModel
+from lfx.field_typing import Embeddings
+from lfx.io import IntInput
+
+
+class FakeEmbeddingsComponent(LCEmbeddingsModel):
+    display_name = "Fake Embeddings"
+    description = "Generate fake embeddings, useful for initial testing and connecting components."
+    icon = "LangChain"
+    name = "LangChainFakeEmbeddings"
+
+    inputs = [
+        IntInput(
+            name="dimensions",
+            display_name="Dimensions",
+            info="The number of dimensions the resulting output embeddings should have.",
+            value=5,
+        ),
+    ]
+
+    def build_embeddings(self) -> Embeddings:
+        return FakeEmbeddings(
+            size=self.dimensions or 5,
+        )
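
A short sketch (not part of the package) of what the returned `FakeEmbeddings` object does, assuming `langchain-community` is installed:

```python
# Illustrative only: random vectors of the configured size, useful for wiring tests.
from langchain_community.embeddings import FakeEmbeddings

emb = FakeEmbeddings(size=5)
print(emb.embed_query("hello"))              # a list of 5 floats
print(len(emb.embed_documents(["a", "b"])))  # 2
```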

lfx/components/langchain_utilities/html_link_extractor.py
@@ -0,0 +1,35 @@
+from typing import Any
+
+from langchain_community.graph_vectorstores.extractors import HtmlLinkExtractor, LinkExtractorTransformer
+from langchain_core.documents import BaseDocumentTransformer
+
+from lfx.base.document_transformers.model import LCDocumentTransformerComponent
+from lfx.inputs.inputs import BoolInput, DataInput, StrInput
+
+
+class HtmlLinkExtractorComponent(LCDocumentTransformerComponent):
+    display_name = "HTML Link Extractor"
+    description = "Extract hyperlinks from HTML content."
+    documentation = "https://python.langchain.com/v0.2/api_reference/community/graph_vectorstores/langchain_community.graph_vectorstores.extractors.html_link_extractor.HtmlLinkExtractor.html"
+    name = "HtmlLinkExtractor"
+    icon = "LangChain"
+
+    inputs = [
+        StrInput(name="kind", display_name="Kind of edge", value="hyperlink", required=False),
+        BoolInput(name="drop_fragments", display_name="Drop URL fragments", value=True, required=False),
+        DataInput(
+            name="data_input",
+            display_name="Input",
+            info="The texts from which to extract links.",
+            input_types=["Document", "Data"],
+            required=True,
+        ),
+    ]
+
+    def get_data_input(self) -> Any:
+        return self.data_input
+
+    def build_document_transformer(self) -> BaseDocumentTransformer:
+        return LinkExtractorTransformer(
+            [HtmlLinkExtractor(kind=self.kind, drop_fragments=self.drop_fragments).as_document_extractor()]
+        )
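
A rough usage sketch of the transformer this component builds (not part of the package), assuming `langchain-community` with the graph-vectorstores extras is installed; the HTML string and source URL are placeholders:

```python
# Illustrative only: extracted link edges are attached to the document metadata.
from langchain_community.graph_vectorstores.extractors import HtmlLinkExtractor, LinkExtractorTransformer
from langchain_core.documents import Document

transformer = LinkExtractorTransformer([HtmlLinkExtractor(kind="hyperlink").as_document_extractor()])
doc = Document(page_content='<a href="https://example.com">example</a>', metadata={"source": "https://host/page"})
(transformed,) = transformer.transform_documents([doc])
print(transformed.metadata)  # metadata now carries the extracted hyperlink edges
```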

lfx/components/langchain_utilities/json_agent.py
@@ -0,0 +1,45 @@
+from pathlib import Path
+
+import yaml
+from langchain.agents import AgentExecutor
+from langchain_community.agent_toolkits import create_json_agent
+from langchain_community.agent_toolkits.json.toolkit import JsonToolkit
+from langchain_community.tools.json.tool import JsonSpec
+
+from lfx.base.agents.agent import LCAgentComponent
+from lfx.inputs.inputs import FileInput, HandleInput
+
+
+class JsonAgentComponent(LCAgentComponent):
+    display_name = "JsonAgent"
+    description = "Construct a json agent from an LLM and tools."
+    name = "JsonAgent"
+    legacy: bool = True
+
+    inputs = [
+        *LCAgentComponent.get_base_inputs(),
+        HandleInput(
+            name="llm",
+            display_name="Language Model",
+            input_types=["LanguageModel"],
+            required=True,
+        ),
+        FileInput(
+            name="path",
+            display_name="File Path",
+            file_types=["json", "yaml", "yml"],
+            required=True,
+        ),
+    ]
+
+    def build_agent(self) -> AgentExecutor:
+        path = Path(self.path)
+        if path.suffix in {"yaml", "yml"}:
+            with path.open(encoding="utf-8") as file:
+                yaml_dict = yaml.safe_load(file)
+            spec = JsonSpec(dict_=yaml_dict)
+        else:
+            spec = JsonSpec.from_file(path)
+        toolkit = JsonToolkit(spec=spec)
+
+        return create_json_agent(llm=self.llm, toolkit=toolkit, **self.get_agent_kwargs())
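
A minimal sketch (not part of the package) of the same toolkit construction outside of Langflow, assuming `langchain-community` and `langchain-openai` are installed; `spec.json` is a placeholder path:

```python
# Illustrative only: mirrors the component's non-YAML branch.
from pathlib import Path

from langchain_community.agent_toolkits import create_json_agent
from langchain_community.agent_toolkits.json.toolkit import JsonToolkit
from langchain_community.tools.json.tool import JsonSpec
from langchain_openai import ChatOpenAI

spec = JsonSpec.from_file(Path("spec.json"))  # placeholder JSON file
agent = create_json_agent(llm=ChatOpenAI(model="gpt-4o-mini"), toolkit=JsonToolkit(spec=spec), verbose=True)
print(agent.invoke({"input": "What top-level keys does the document have?"})["output"])
```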

lfx/components/langchain_utilities/langchain_hub.py
@@ -0,0 +1,126 @@
+import re
+
+from langchain_core.prompts import HumanMessagePromptTemplate
+
+from lfx.custom.custom_component.component import Component
+from lfx.inputs.inputs import DefaultPromptField, SecretStrInput, StrInput
+from lfx.io import Output
+from lfx.schema.message import Message
+
+
+class LangChainHubPromptComponent(Component):
+    display_name: str = "Prompt Hub"
+    description: str = "Prompt Component that uses LangChain Hub prompts"
+    beta = True
+    icon = "LangChain"
+    trace_type = "prompt"
+    name = "LangChain Hub Prompt"
+
+    inputs = [
+        SecretStrInput(
+            name="langchain_api_key",
+            display_name="Your LangChain API Key",
+            info="The LangChain API Key to use.",
+            required=True,
+        ),
+        StrInput(
+            name="langchain_hub_prompt",
+            display_name="LangChain Hub Prompt",
+            info="The LangChain Hub prompt to use, i.e., 'efriis/my-first-prompt'",
+            refresh_button=True,
+            required=True,
+        ),
+    ]
+
+    outputs = [
+        Output(display_name="Build Prompt", name="prompt", method="build_prompt"),
+    ]
+
+    def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None):
+        # If the field is not langchain_hub_prompt or the value is empty, return the build config as is
+        if field_name != "langchain_hub_prompt" or not field_value:
+            return build_config
+
+        # Fetch the template
+        template = self._fetch_langchain_hub_template()
+
+        # Get the template's messages
+        if hasattr(template, "messages"):
+            template_messages = template.messages
+        else:
+            template_messages = [HumanMessagePromptTemplate(prompt=template)]
+
+        # Extract the messages from the prompt data
+        prompt_template = [message_data.prompt for message_data in template_messages]
+
+        # Regular expression to find all instances of {<string>}
+        pattern = r"\{(.*?)\}"
+
+        # Get all the custom fields
+        custom_fields: list[str] = []
+        full_template = ""
+        for message in prompt_template:
+            # Find all matches
+            matches = re.findall(pattern, message.template)
+            custom_fields += matches
+
+            # Create a string version of the full template
+            full_template = full_template + "\n" + message.template
+
+        # No need to reprocess if we have them already
+        if all("param_" + custom_field in build_config for custom_field in custom_fields):
+            return build_config
+
+        # Easter egg: Show template in info popup
+        build_config["langchain_hub_prompt"]["info"] = full_template
+
+        # Remove old parameter inputs if any
+        for key in build_config.copy():
+            if key.startswith("param_"):
+                del build_config[key]
+
+        # Now create inputs for each
+        for custom_field in custom_fields:
+            new_parameter = DefaultPromptField(
+                name=f"param_{custom_field}",
+                display_name=custom_field,
+                info="Fill in the value for {" + custom_field + "}",
+            ).to_dict()
+
+            # Add the new parameter to the build config
+            build_config[f"param_{custom_field}"] = new_parameter
+
+        return build_config
+
+    async def build_prompt(
+        self,
+    ) -> Message:
+        # Fetch the template
+        template = self._fetch_langchain_hub_template()
+
+        # Get the parameters from the attributes
+        params_dict = {param: getattr(self, "param_" + param, f"{{{param}}}") for param in template.input_variables}
+        original_params = {k: v.text if hasattr(v, "text") else v for k, v in params_dict.items() if v is not None}
+        prompt_value = template.invoke(original_params)
+
+        # Update the template with the new value
+        original_params["template"] = prompt_value.to_string()
+
+        # Now pass the filtered attributes to the function
+        prompt = Message.from_template(**original_params)
+
+        self.status = prompt.text
+
+        return prompt
+
+    def _fetch_langchain_hub_template(self):
+        import langchain.hub
+
+        # Check if the api key is provided
+        if not self.langchain_api_key:
+            msg = "Please provide a LangChain API Key"
+
+            raise ValueError(msg)
+
+        # Pull the prompt from LangChain Hub
+        return langchain.hub.pull(self.langchain_hub_prompt, api_key=self.langchain_api_key)
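
For reference, a minimal sketch (not part of the package) of the underlying hub call, assuming `langchain` and `langchainhub` are installed; `efriis/my-first-prompt` is taken purely from the field's help text:

```python
# Illustrative only: pull a hub prompt and fill its variables.
from langchain import hub

prompt = hub.pull("efriis/my-first-prompt")  # an API key can be passed explicitly, as the component does
print(prompt.input_variables)                # discover the variables the prompt expects
prompt_value = prompt.invoke({name: "example" for name in prompt.input_variables})
print(prompt_value.to_string())
```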

lfx/components/langchain_utilities/language_recursive.py
@@ -0,0 +1,49 @@
+from typing import Any
+
+from langchain_text_splitters import Language, RecursiveCharacterTextSplitter, TextSplitter
+
+from lfx.base.textsplitters.model import LCTextSplitterComponent
+from lfx.inputs.inputs import DataInput, DropdownInput, IntInput
+
+
+class LanguageRecursiveTextSplitterComponent(LCTextSplitterComponent):
+    display_name: str = "Language Recursive Text Splitter"
+    description: str = "Split text into chunks of a specified length based on language."
+    documentation: str = "https://docs.langflow.org/components/text-splitters#languagerecursivetextsplitter"
+    name = "LanguageRecursiveTextSplitter"
+    icon = "LangChain"
+
+    inputs = [
+        IntInput(
+            name="chunk_size",
+            display_name="Chunk Size",
+            info="The maximum length of each chunk.",
+            value=1000,
+        ),
+        IntInput(
+            name="chunk_overlap",
+            display_name="Chunk Overlap",
+            info="The amount of overlap between chunks.",
+            value=200,
+        ),
+        DataInput(
+            name="data_input",
+            display_name="Input",
+            info="The texts to split.",
+            input_types=["Document", "Data"],
+            required=True,
+        ),
+        DropdownInput(
+            name="code_language", display_name="Code Language", options=[x.value for x in Language], value="python"
+        ),
+    ]
+
+    def get_data_input(self) -> Any:
+        return self.data_input
+
+    def build_text_splitter(self) -> TextSplitter:
+        return RecursiveCharacterTextSplitter.from_language(
+            language=Language(self.code_language),
+            chunk_size=self.chunk_size,
+            chunk_overlap=self.chunk_overlap,
+        )

lfx/components/langchain_utilities/language_semantic.py
@@ -0,0 +1,138 @@
+from langchain.docstore.document import Document
+from langchain_experimental.text_splitter import SemanticChunker
+
+from lfx.base.textsplitters.model import LCTextSplitterComponent
+from lfx.io import (
+    DropdownInput,
+    FloatInput,
+    HandleInput,
+    IntInput,
+    MessageTextInput,
+    Output,
+)
+from lfx.schema.data import Data
+
+
+class SemanticTextSplitterComponent(LCTextSplitterComponent):
+    """Split text into semantically meaningful chunks using semantic similarity."""
+
+    display_name: str = "Semantic Text Splitter"
+    name: str = "SemanticTextSplitter"
+    description: str = "Split text into semantically meaningful chunks using semantic similarity."
+    documentation = "https://python.langchain.com/docs/how_to/semantic-chunker/"
+    beta = True  # this component is beta because it is imported from langchain_experimental
+    icon = "LangChain"
+
+    inputs = [
+        HandleInput(
+            name="data_inputs",
+            display_name="Data Inputs",
+            info="List of Data objects containing text and metadata to split.",
+            input_types=["Data"],
+            is_list=True,
+            required=True,
+        ),
+        HandleInput(
+            name="embeddings",
+            display_name="Embeddings",
+            info="Embeddings model to use for semantic similarity. Required.",
+            input_types=["Embeddings"],
+            is_list=False,
+            required=True,
+        ),
+        DropdownInput(
+            name="breakpoint_threshold_type",
+            display_name="Breakpoint Threshold Type",
+            info=(
+                "Method to determine breakpoints. Options: 'percentile', "
+                "'standard_deviation', 'interquartile'. Defaults to 'percentile'."
+            ),
+            value="percentile",
+            options=["percentile", "standard_deviation", "interquartile"],
+        ),
+        FloatInput(
+            name="breakpoint_threshold_amount",
+            display_name="Breakpoint Threshold Amount",
+            info="Numerical amount for the breakpoint threshold.",
+            value=0.5,
+        ),
+        IntInput(
+            name="number_of_chunks",
+            display_name="Number of Chunks",
+            info="Number of chunks to split the text into.",
+            value=5,
+        ),
+        MessageTextInput(
+            name="sentence_split_regex",
+            display_name="Sentence Split Regex",
+            info="Regular expression to split sentences. Optional.",
+            value="",
+            advanced=True,
+        ),
+        IntInput(
+            name="buffer_size",
+            display_name="Buffer Size",
+            info="Size of the buffer.",
+            value=0,
+            advanced=True,
+        ),
+    ]
+
+    outputs = [
+        Output(display_name="Chunks", name="chunks", method="split_text"),
+    ]
+
+    def _docs_to_data(self, docs: list[Document]) -> list[Data]:
+        """Convert a list of Document objects to Data objects."""
+        return [Data(text=doc.page_content, data=doc.metadata) for doc in docs]
+
+    def split_text(self) -> list[Data]:
+        """Split the input data into semantically meaningful chunks."""
+        try:
+            embeddings = getattr(self, "embeddings", None)
+            if embeddings is None:
+                error_msg = "An embeddings model is required for SemanticTextSplitter."
+                raise ValueError(error_msg)
+
+            if not self.data_inputs:
+                error_msg = "Data inputs cannot be empty."
+                raise ValueError(error_msg)
+
+            documents = []
+            for _input in self.data_inputs:
+                if isinstance(_input, Data):
+                    documents.append(_input.to_lc_document())
+                else:
+                    error_msg = f"Invalid data input type: {_input}"
+                    raise TypeError(error_msg)
+
+            if not documents:
+                error_msg = "No valid Data objects found in data_inputs."
+                raise ValueError(error_msg)
+
+            texts = [doc.page_content for doc in documents]
+            metadatas = [doc.metadata for doc in documents]
+
+            splitter_params = {
+                "embeddings": embeddings,
+                "breakpoint_threshold_type": self.breakpoint_threshold_type or "percentile",
+                "breakpoint_threshold_amount": self.breakpoint_threshold_amount,
+                "number_of_chunks": self.number_of_chunks,
+                "buffer_size": self.buffer_size,
+            }
+
+            if self.sentence_split_regex:
+                splitter_params["sentence_split_regex"] = self.sentence_split_regex
+
+            splitter = SemanticChunker(**splitter_params)
+            docs = splitter.create_documents(texts, metadatas=metadatas)
+
+            data = self._docs_to_data(docs)
+            self.status = data
+
+        except Exception as e:
+            error_msg = f"An error occurred during semantic splitting: {e}"
+            raise RuntimeError(error_msg) from e
+
+        else:
+            return data

lfx/components/langchain_utilities/llm_checker.py
@@ -0,0 +1,39 @@
+from langchain.chains import LLMCheckerChain
+
+from lfx.base.chains.model import LCChainComponent
+from lfx.inputs.inputs import HandleInput, MultilineInput
+from lfx.schema import Message
+
+
+class LLMCheckerChainComponent(LCChainComponent):
+    display_name = "LLMCheckerChain"
+    description = "Chain for question-answering with self-verification."
+    documentation = "https://python.langchain.com/docs/modules/chains/additional/llm_checker"
+    name = "LLMCheckerChain"
+    legacy: bool = True
+    icon = "LangChain"
+    inputs = [
+        MultilineInput(
+            name="input_value",
+            display_name="Input",
+            info="The input value to pass to the chain.",
+            required=True,
+        ),
+        HandleInput(
+            name="llm",
+            display_name="Language Model",
+            input_types=["LanguageModel"],
+            required=True,
+        ),
+    ]
+
+    def invoke_chain(self) -> Message:
+        chain = LLMCheckerChain.from_llm(llm=self.llm)
+        response = chain.invoke(
+            {chain.input_key: self.input_value},
+            config={"callbacks": self.get_langchain_callbacks()},
+        )
+        result = response.get(chain.output_key, "")
+        result = str(result)
+        self.status = result
+        return Message(text=result)
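
Finally, a rough sketch (not part of the package) of the legacy checker chain invoked above, assuming `langchain` and `langchain-openai` are installed:

```python
# Illustrative only: the chain asks the model to verify and revise its own draft answer.
from langchain.chains import LLMCheckerChain
from langchain_openai import ChatOpenAI

chain = LLMCheckerChain.from_llm(llm=ChatOpenAI(model="gpt-4o-mini"))
response = chain.invoke({chain.input_key: "What mammal lays the largest eggs?"})
print(response[chain.output_key])
```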
|