langflow-base-nightly 0.5.0.dev36__py3-none-any.whl → 0.5.0.dev38__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- langflow/__main__.py +1 -1
- langflow/alembic/versions/4e5980a44eaa_fix_date_times_again.py +24 -30
- langflow/alembic/versions/58b28437a398_modify_nullable.py +6 -6
- langflow/alembic/versions/79e675cb6752_change_datetime_type.py +24 -30
- langflow/alembic/versions/b2fa308044b5_add_unique_constraints.py +12 -13
- langflow/api/build.py +21 -26
- langflow/api/health_check_router.py +3 -3
- langflow/api/utils.py +3 -3
- langflow/api/v1/callback.py +2 -2
- langflow/api/v1/chat.py +19 -31
- langflow/api/v1/endpoints.py +10 -10
- langflow/api/v1/flows.py +1 -1
- langflow/api/v1/knowledge_bases.py +19 -12
- langflow/api/v1/mcp.py +12 -12
- langflow/api/v1/mcp_projects.py +45 -81
- langflow/api/v1/mcp_utils.py +8 -8
- langflow/api/v1/schemas.py +1 -5
- langflow/api/v1/store.py +1 -1
- langflow/api/v1/validate.py +2 -2
- langflow/api/v1/voice_mode.py +58 -62
- langflow/api/v2/files.py +5 -3
- langflow/api/v2/mcp.py +10 -9
- langflow/base/composio/composio_base.py +21 -2
- langflow/base/data/docling_utils.py +194 -0
- langflow/base/data/kb_utils.py +33 -0
- langflow/base/embeddings/aiml_embeddings.py +1 -1
- langflow/base/flow_processing/utils.py +1 -2
- langflow/base/io/__init__.py +0 -1
- langflow/base/langwatch/utils.py +2 -1
- langflow/base/mcp/util.py +49 -47
- langflow/base/models/model.py +3 -3
- langflow/base/prompts/api_utils.py +1 -1
- langflow/base/tools/flow_tool.py +2 -2
- langflow/base/tools/run_flow.py +2 -6
- langflow/components/Notion/add_content_to_page.py +2 -2
- langflow/components/Notion/list_database_properties.py +2 -2
- langflow/components/Notion/list_pages.py +2 -2
- langflow/components/Notion/page_content_viewer.py +2 -2
- langflow/components/Notion/update_page_property.py +1 -1
- langflow/components/agentql/agentql_api.py +2 -10
- langflow/components/agents/agent.py +3 -3
- langflow/components/agents/mcp_component.py +54 -69
- langflow/components/anthropic/anthropic.py +5 -4
- langflow/components/assemblyai/assemblyai_get_subtitles.py +2 -2
- langflow/components/assemblyai/assemblyai_lemur.py +2 -2
- langflow/components/assemblyai/assemblyai_list_transcripts.py +2 -2
- langflow/components/assemblyai/assemblyai_poll_transcript.py +2 -2
- langflow/components/assemblyai/assemblyai_start_transcript.py +2 -2
- langflow/components/data/file.py +575 -55
- langflow/components/data/kb_ingest.py +116 -43
- langflow/components/data/kb_retrieval.py +24 -26
- langflow/components/data/url.py +1 -1
- langflow/components/datastax/astra_assistant_manager.py +3 -3
- langflow/components/datastax/create_assistant.py +1 -2
- langflow/components/deactivated/merge_data.py +1 -2
- langflow/components/deactivated/sub_flow.py +6 -7
- langflow/components/deactivated/vectara_self_query.py +3 -3
- langflow/components/docling/__init__.py +0 -198
- langflow/components/docling/docling_inline.py +1 -1
- langflow/components/embeddings/text_embedder.py +3 -3
- langflow/components/firecrawl/firecrawl_extract_api.py +2 -9
- langflow/components/google/gmail.py +1 -1
- langflow/components/google/google_generative_ai.py +5 -11
- langflow/components/groq/groq.py +4 -3
- langflow/components/helpers/current_date.py +2 -3
- langflow/components/helpers/memory.py +1 -1
- langflow/components/ibm/watsonx.py +1 -1
- langflow/components/ibm/watsonx_embeddings.py +1 -1
- langflow/components/langwatch/langwatch.py +3 -3
- langflow/components/logic/flow_tool.py +2 -2
- langflow/components/logic/notify.py +1 -1
- langflow/components/logic/run_flow.py +2 -3
- langflow/components/logic/sub_flow.py +4 -5
- langflow/components/mem0/mem0_chat_memory.py +2 -8
- langflow/components/nvidia/nvidia.py +3 -3
- langflow/components/olivya/olivya.py +7 -7
- langflow/components/ollama/ollama.py +8 -6
- langflow/components/processing/batch_run.py +8 -8
- langflow/components/processing/data_operations.py +2 -2
- langflow/components/processing/merge_data.py +1 -2
- langflow/components/processing/message_to_data.py +2 -3
- langflow/components/processing/parse_json_data.py +1 -1
- langflow/components/processing/save_file.py +6 -32
- langflow/components/prototypes/python_function.py +2 -3
- langflow/components/serpapi/serp.py +1 -1
- langflow/components/tavily/tavily_extract.py +1 -1
- langflow/components/tavily/tavily_search.py +1 -1
- langflow/components/tools/calculator.py +2 -2
- langflow/components/tools/python_code_structured_tool.py +3 -10
- langflow/components/tools/python_repl.py +2 -2
- langflow/components/tools/searxng.py +3 -3
- langflow/components/tools/serp_api.py +2 -2
- langflow/components/tools/tavily_search_tool.py +2 -2
- langflow/components/tools/yahoo_finance.py +1 -1
- langflow/components/twelvelabs/video_embeddings.py +4 -4
- langflow/components/vectorstores/astradb.py +30 -19
- langflow/components/vectorstores/local_db.py +1 -1
- langflow/components/yahoosearch/yahoo.py +1 -1
- langflow/components/youtube/trending.py +3 -4
- langflow/custom/attributes.py +2 -1
- langflow/custom/code_parser/code_parser.py +1 -1
- langflow/custom/custom_component/base_component.py +1 -1
- langflow/custom/custom_component/component.py +16 -2
- langflow/custom/directory_reader/directory_reader.py +7 -7
- langflow/custom/directory_reader/utils.py +1 -2
- langflow/custom/utils.py +30 -30
- langflow/events/event_manager.py +1 -1
- langflow/frontend/assets/{SlackIcon-B260Qg_R.js → SlackIcon-BhW6H3JR.js} +1 -1
- langflow/frontend/assets/{Wikipedia-BB2mbgyd.js → Wikipedia-Dx5jbiy3.js} +1 -1
- langflow/frontend/assets/{Wolfram-DytXC9hF.js → Wolfram-CIyonzwo.js} +1 -1
- langflow/frontend/assets/{index-DPX6X_bw.js → index-0XQqYgdG.js} +1 -1
- langflow/frontend/assets/{index-DtJyCbzF.js → index-1Q3VBqKn.js} +1 -1
- langflow/frontend/assets/{index-DztLFiip.js → index-35sspuLu.js} +1 -1
- langflow/frontend/assets/{index-BeNby7qF.js → index-7hzXChQz.js} +1 -1
- langflow/frontend/assets/{index-BOEf7-ty.js → index-8cuhogZP.js} +1 -1
- langflow/frontend/assets/{index-D0s9f6Re.js → index-B0m53xKd.js} +1 -1
- langflow/frontend/assets/{index-DpJiH-Rk.js → index-B1XqWJhG.js} +1 -1
- langflow/frontend/assets/{index-DuAeoC-H.js → index-B3KCdQ91.js} +1 -1
- langflow/frontend/assets/{index-Bxml6wXu.js → index-B7uEuOPK.js} +1 -1
- langflow/frontend/assets/{index-CDFLVFB4.js → index-B8UR8v-Q.js} +1 -1
- langflow/frontend/assets/{index-ci4XHjbJ.js → index-BD7Io1hL.js} +6 -6
- langflow/frontend/assets/{index-DasrI03Y.js → index-BDQrd7Tj.js} +1 -1
- langflow/frontend/assets/{index-CkQ-bJ4G.js → index-BDuk0d7P.js} +1 -1
- langflow/frontend/assets/{index-C_1RBTul.js → index-BFQ8KFK0.js} +1 -1
- langflow/frontend/assets/{index-DqSH4x-R.js → index-BFf0HTFI.js} +1 -1
- langflow/frontend/assets/{index-BXMhmvTj.js → index-BHhnpSkW.js} +1 -1
- langflow/frontend/assets/{index-Uq2ij_SS.js → index-BKKrUElc.js} +1 -1
- langflow/frontend/assets/{index-3TJWUdmx.js → index-BKeZt2hQ.js} +1 -1
- langflow/frontend/assets/{index-DHlEwAxb.js → index-BKlQbl-6.js} +1 -1
- langflow/frontend/assets/{index-Bisa4IQF.js → index-BLYw9MK2.js} +1 -1
- langflow/frontend/assets/{index-GODbXlHC.js → index-BLsVo9iW.js} +1 -1
- langflow/frontend/assets/{index-CHFO5O4g.js → index-BNQIbda3.js} +1 -1
- langflow/frontend/assets/{index-3uOAA_XX.js → index-BPR2mEFC.js} +1 -1
- langflow/frontend/assets/{index-3qMh9x6K.js → index-BPfdqCc_.js} +1 -1
- langflow/frontend/assets/{index-rcdQpNcU.js → index-BQrVDjR1.js} +1 -1
- langflow/frontend/assets/{index-4eRtaV45.js → index-BRmSeoWR.js} +1 -1
- langflow/frontend/assets/{index-Ct9_T9ox.js → index-BUse-kxM.js} +1 -1
- langflow/frontend/assets/{index-BdYgKk1d.js → index-BVFaF7HW.js} +1 -1
- langflow/frontend/assets/{index-CWWo2zOA.js → index-BWgIWfv2.js} +1 -1
- langflow/frontend/assets/{index-Du9aJK7m.js → index-BWt5xGeA.js} +1 -1
- langflow/frontend/assets/{index-Baka5dKE.js → index-BYhcGLTV.js} +1 -1
- langflow/frontend/assets/{index-BWq9GTzt.js → index-BYjw7Gk3.js} +1 -1
- langflow/frontend/assets/{index-r1LZg-PY.js → index-BZFljdMa.js} +1 -1
- langflow/frontend/assets/index-BcAgItH4.js +1 -0
- langflow/frontend/assets/{index-B8TlNgn-.js → index-Bct1s6__.js} +1 -1
- langflow/frontend/assets/{index-DZzbmg3J.js → index-Bhv79Zso.js} +1 -1
- langflow/frontend/assets/{index-CqDUqHfd.js → index-Bj3lSwvZ.js} +1 -1
- langflow/frontend/assets/{index-dkS0ek2S.js → index-Bk4mTwnI.js} +1 -1
- langflow/frontend/assets/{index-tOy_uloT.js → index-BmIx1cws.js} +1 -1
- langflow/frontend/assets/{index-BVtf6m9S.js → index-BmYJJ5YS.js} +1 -1
- langflow/frontend/assets/{index-mBjJYD9q.js → index-BnAFhkSN.js} +1 -1
- langflow/frontend/assets/{index-Ba3RTMXI.js → index-Bo-ww0Bb.js} +1 -1
- langflow/frontend/assets/{index-BsBWP-Dh.js → index-BpmqDOeZ.js} +1 -1
- langflow/frontend/assets/{index-BqUeOc7Y.js → index-BrVhdPZb.js} +1 -1
- langflow/frontend/assets/{index-DWkMJnbd.js → index-BvGQfVBD.js} +1 -1
- langflow/frontend/assets/{index-DdzVmJHE.js → index-Bwi4flFg.js} +1 -1
- langflow/frontend/assets/{index-Ccb5B8zG.js → index-BzoRPtTY.js} +1 -1
- langflow/frontend/assets/{index-Ym6gz0T6.js → index-C--IDAyc.js} +1 -1
- langflow/frontend/assets/{index-CvQ0w8Pj.js → index-C0E3_MIK.js} +1 -1
- langflow/frontend/assets/{index-DxIs8VSp.js → index-C27Jj_26.js} +1 -1
- langflow/frontend/assets/{index-BxWXWRmZ.js → index-C2eQmQsn.js} +1 -1
- langflow/frontend/assets/{index-B536IPXH.js → index-C8K0r39B.js} +1 -1
- langflow/frontend/assets/{index-BEDxAk3N.js → index-CEJNWPhA.js} +1 -1
- langflow/frontend/assets/{index-G_U_kPAd.js → index-CFNTYfFK.js} +1 -1
- langflow/frontend/assets/{index-CMGZGIx_.js → index-CMHpjHZl.js} +1 -1
- langflow/frontend/assets/{index-C76aBV_h.js → index-CSu8KHOi.js} +1 -1
- langflow/frontend/assets/{index-B-c82Fnu.js → index-CUKmGsI6.js} +1 -1
- langflow/frontend/assets/{index-DX7XsAcx.js → index-CWYiSeWV.js} +1 -1
- langflow/frontend/assets/{index-COL0eiWI.js → index-CY7_TBTC.js} +1 -1
- langflow/frontend/assets/{index-BlBl2tvQ.js → index-CbnWRlYY.js} +1 -1
- langflow/frontend/assets/{index-BQB-iDYl.js → index-CfPBgkqg.js} +1 -1
- langflow/frontend/assets/{index-DWr_zPkx.js → index-Cg53lrYh.js} +1 -1
- langflow/frontend/assets/{index-BcgB3rXH.js → index-CgU7KF4I.js} +1 -1
- langflow/frontend/assets/{index-CkSzjCqM.js → index-CgwykVGh.js} +1 -1
- langflow/frontend/assets/{index-BbsND1Qg.js → index-Ch5r0oW6.js} +1 -1
- langflow/frontend/assets/{index-AY5Dm2mG.js → index-CjsommIr.js} +1 -1
- langflow/frontend/assets/{index-BtJ2o21k.js → index-CkK25zZO.js} +1 -1
- langflow/frontend/assets/{index-BKvKC-12.js → index-CkjwSTSM.js} +1 -1
- langflow/frontend/assets/{index-BVHvIhT5.js → index-CmSFKgiD.js} +1 -1
- langflow/frontend/assets/{index-D-zkHcob.js → index-Cr5v2ave.js} +1 -1
- langflow/frontend/assets/{index-js8ceOaP.js → index-CrAF-31Y.js} +1 -1
- langflow/frontend/assets/{index-BNbWMmAV.js → index-CsLQiWNf.js} +1 -1
- langflow/frontend/assets/{index-VcXZzovW.js → index-CuCM7Wu7.js} +1 -1
- langflow/frontend/assets/{index-DzeIsaBm.js → index-Cxy9sEpy.js} +1 -1
- langflow/frontend/assets/{index-LrMzDsq9.js → index-CyP3py8K.js} +1 -1
- langflow/frontend/assets/{index-C8KD3LPb.js → index-CzHzeZuA.js} +1 -1
- langflow/frontend/assets/{index-DS1EgA10.js → index-D1oynC8a.js} +1 -1
- langflow/frontend/assets/{index-ByFXr9Iq.js → index-D4tjMhfY.js} +1 -1
- langflow/frontend/assets/{index-DyJDHm2D.js → index-D6CSIrp1.js} +1 -1
- langflow/frontend/assets/{index-DIqSyDVO.js → index-D9kwEzPB.js} +1 -1
- langflow/frontend/assets/{index-D5PeCofu.js → index-DDXsm8tz.js} +1 -1
- langflow/frontend/assets/{index-CJwYfDBz.js → index-DDhJVVel.js} +1 -1
- langflow/frontend/assets/{index-C7x9R_Yo.js → index-DH6o91_s.js} +1 -1
- langflow/frontend/assets/{index-DpQKtcXu.js → index-DHngW1k8.js} +1 -1
- langflow/frontend/assets/{index-VZnN0P6C.js → index-DIKUsGLF.js} +1 -1
- langflow/frontend/assets/{index-VHmUHUUU.js → index-DJESSNJi.js} +1 -1
- langflow/frontend/assets/{index-BdIWbCEL.js → index-DMCWDJOl.js} +1 -1
- langflow/frontend/assets/{index-DK8vNpXK.js → index-DOEvKC2X.js} +1 -1
- langflow/frontend/assets/{index-C7V5U9yH.js → index-DOQDkSoK.js} +1 -1
- langflow/frontend/assets/{index-D0HmkH0H.js → index-DXAfIEvs.js} +1 -1
- langflow/frontend/assets/{index-C9N80hP8.js → index-DZP_SaHb.js} +1 -1
- langflow/frontend/assets/{index-B2ggrBuR.js → index-DZxUIhWh.js} +1 -1
- langflow/frontend/assets/{index-DS9I4y48.js → index-Dda2u_yz.js} +1 -1
- langflow/frontend/assets/{index-BLROcaSz.js → index-Dg8N3NSO.js} +1 -1
- langflow/frontend/assets/{index-Dpz3oBf5.js → index-DkGhPNeA.js} +1 -1
- langflow/frontend/assets/{index-BnLT29qW.js → index-Dka_Rk4-.js} +1 -1
- langflow/frontend/assets/{index-B5ed-sAv.js → index-DljpLeCW.js} +1 -1
- langflow/frontend/assets/{index-Cx__T92e.js → index-DnVYJtVO.js} +1 -1
- langflow/frontend/assets/{index-hOkEW3JP.js → index-DqbzUcI5.js} +1 -1
- langflow/frontend/assets/{index-BxkZkBgQ.js → index-Dr6pVDPI.js} +1 -1
- langflow/frontend/assets/{index-BIkqesA-.js → index-DsoX2o1S.js} +1 -1
- langflow/frontend/assets/{index-Cpgkb0Q3.js → index-DwfHWnX7.js} +1 -1
- langflow/frontend/assets/{index-B9Mo3ndZ.js → index-Dx-Z87KT.js} +1 -1
- langflow/frontend/assets/{index-R7q8cAek.js → index-DyqITq51.js} +1 -1
- langflow/frontend/assets/{index-DKEXZFUO.js → index-DzIv3RyR.js} +1 -1
- langflow/frontend/assets/{index-BJrY2Fiu.js → index-G4ro0MjT.js} +1 -1
- langflow/frontend/assets/{index-IFGgPiye.js → index-H7J7w7fa.js} +1 -1
- langflow/frontend/assets/{index-lKEJpUsF.js → index-KWY77KfV.js} +1 -1
- langflow/frontend/assets/{index-DDNNv4C0.js → index-U9GWm1eH.js} +1 -1
- langflow/frontend/assets/{index-BRWNIt9F.js → index-Un9pWxnP.js} +1 -1
- langflow/frontend/assets/{index-BCK-ZyIh.js → index-Xi4TplbI.js} +1 -1
- langflow/frontend/assets/{index-BEKoRwsX.js → index-_cbGmjF4.js} +1 -1
- langflow/frontend/assets/{index-7xXgqu09.js → index-cEXY6V06.js} +1 -1
- langflow/frontend/assets/{index-D87Zw62M.js → index-dyXKnkMi.js} +1 -1
- langflow/frontend/assets/{index-CG7cp0nD.js → index-eUkS6iJM.js} +1 -1
- langflow/frontend/assets/{index-CoUlHbtg.js → index-ekfMOqrF.js} +1 -1
- langflow/frontend/assets/{index-DhzEUXfr.js → index-gdb7XMS8.js} +1 -1
- langflow/frontend/assets/{index-D9eflZfP.js → index-hZUcL0MZ.js} +1 -1
- langflow/frontend/assets/{index-CwIxqYlT.js → index-kkA-qHB_.js} +1 -1
- langflow/frontend/assets/{index-sS6XLk3j.js → index-mzl9ULw5.js} +1 -1
- langflow/frontend/assets/{index-BjENqyKe.js → index-oxHBZk2v.js} +1 -1
- langflow/frontend/assets/{index-BejHxU5W.js → index-p2kStSPe.js} +1 -1
- langflow/frontend/assets/{index-BOYTBrh9.js → index-paQEWYGT.js} +1 -1
- langflow/frontend/assets/{index-Cd5zuUUK.js → index-r_8gs4nL.js} +1 -1
- langflow/frontend/assets/{index-AlJ7td-D.js → index-uiKla4UR.js} +1 -1
- langflow/frontend/assets/{index-B8y58M9b.js → index-vJOO5U8M.js} +1 -1
- langflow/frontend/assets/{index-CF4dtI6S.js → index-w72fDjpG.js} +1 -1
- langflow/frontend/assets/{index-C2Xd7UkR.js → index-zV82kQ6k.js} +1 -1
- langflow/frontend/assets/lazyIconImports-DTNgvPE-.js +2 -0
- langflow/frontend/assets/{use-post-add-user-HN0rRnhv.js → use-post-add-user-CvtuazTg.js} +1 -1
- langflow/frontend/index.html +1 -1
- langflow/graph/edge/base.py +2 -3
- langflow/graph/graph/base.py +14 -12
- langflow/graph/graph/constants.py +3 -0
- langflow/graph/utils.py +6 -6
- langflow/graph/vertex/base.py +4 -5
- langflow/graph/vertex/param_handler.py +1 -1
- langflow/graph/vertex/vertex_types.py +2 -2
- langflow/helpers/flow.py +1 -1
- langflow/initial_setup/setup.py +32 -30
- langflow/initial_setup/starter_projects/Blog Writer.json +2 -2
- langflow/initial_setup/starter_projects/Custom Component Generator.json +2 -2
- langflow/initial_setup/starter_projects/Document Q&A.json +1 -1
- langflow/initial_setup/starter_projects/Hybrid Search RAG.json +2 -2
- langflow/initial_setup/starter_projects/Instagram Copywriter.json +3 -3
- langflow/initial_setup/starter_projects/Invoice Summarizer.json +1 -1
- langflow/initial_setup/starter_projects/Knowledge Ingestion.json +4 -4
- langflow/initial_setup/starter_projects/Knowledge Retrieval.json +2 -2
- langflow/initial_setup/starter_projects/Market Research.json +3 -3
- langflow/initial_setup/starter_projects/Meeting Summary.json +6 -6
- langflow/initial_setup/starter_projects/Memory Chatbot.json +2 -2
- langflow/initial_setup/starter_projects/News Aggregator.json +5 -22
- langflow/initial_setup/starter_projects/Nvidia Remix.json +3 -20
- langflow/initial_setup/starter_projects/Pokédex Agent.json +1 -1
- langflow/initial_setup/starter_projects/Portfolio Website Code Generator.json +1 -1
- langflow/initial_setup/starter_projects/Price Deal Finder.json +5 -5
- langflow/initial_setup/starter_projects/Research Agent.json +3 -3
- langflow/initial_setup/starter_projects/SaaS Pricing.json +1 -1
- langflow/initial_setup/starter_projects/Search agent.json +1 -1
- langflow/initial_setup/starter_projects/Sequential Tasks Agents.json +7 -7
- langflow/initial_setup/starter_projects/Simple Agent.json +3 -3
- langflow/initial_setup/starter_projects/Social Media Agent.json +1 -1
- langflow/initial_setup/starter_projects/Text Sentiment Analysis.json +1 -1
- langflow/initial_setup/starter_projects/Travel Planning Agents.json +3 -3
- langflow/initial_setup/starter_projects/Vector Store RAG.json +5 -5
- langflow/initial_setup/starter_projects/Youtube Analysis.json +3 -3
- langflow/interface/components.py +23 -22
- langflow/interface/initialize/loading.py +5 -5
- langflow/interface/run.py +1 -1
- langflow/interface/utils.py +1 -1
- langflow/io/__init__.py +0 -1
- langflow/langflow_launcher.py +1 -1
- langflow/load/load.py +2 -7
- langflow/logging/__init__.py +0 -1
- langflow/logging/logger.py +191 -115
- langflow/logging/setup.py +1 -1
- langflow/main.py +37 -52
- langflow/memory.py +7 -7
- langflow/middleware.py +1 -1
- langflow/processing/process.py +4 -4
- langflow/schema/artifact.py +2 -2
- langflow/schema/data.py +10 -2
- langflow/schema/dataframe.py +1 -1
- langflow/schema/message.py +1 -1
- langflow/serialization/serialization.py +1 -1
- langflow/services/auth/utils.py +2 -2
- langflow/services/cache/disk.py +1 -1
- langflow/services/cache/service.py +3 -3
- langflow/services/database/models/flow/model.py +2 -7
- langflow/services/database/models/transactions/crud.py +2 -2
- langflow/services/database/models/user/crud.py +2 -2
- langflow/services/database/service.py +8 -8
- langflow/services/database/utils.py +6 -5
- langflow/services/deps.py +2 -3
- langflow/services/factory.py +1 -1
- langflow/services/flow/flow_runner.py +7 -12
- langflow/services/job_queue/service.py +16 -15
- langflow/services/manager.py +3 -4
- langflow/services/settings/auth.py +1 -1
- langflow/services/settings/base.py +3 -8
- langflow/services/settings/manager.py +1 -1
- langflow/services/settings/utils.py +1 -1
- langflow/services/socket/__init__.py +0 -1
- langflow/services/socket/service.py +3 -3
- langflow/services/socket/utils.py +4 -4
- langflow/services/state/service.py +1 -2
- langflow/services/storage/factory.py +1 -1
- langflow/services/storage/local.py +9 -8
- langflow/services/storage/s3.py +11 -10
- langflow/services/store/service.py +3 -3
- langflow/services/store/utils.py +3 -2
- langflow/services/task/temp_flow_cleanup.py +7 -7
- langflow/services/telemetry/service.py +10 -10
- langflow/services/tracing/arize_phoenix.py +2 -2
- langflow/services/tracing/langfuse.py +1 -1
- langflow/services/tracing/langsmith.py +1 -1
- langflow/services/tracing/langwatch.py +1 -1
- langflow/services/tracing/opik.py +1 -1
- langflow/services/tracing/service.py +25 -6
- langflow/services/tracing/traceloop.py +245 -0
- langflow/services/utils.py +7 -7
- langflow/services/variable/kubernetes.py +3 -3
- langflow/services/variable/kubernetes_secrets.py +2 -1
- langflow/services/variable/service.py +5 -5
- langflow/utils/component_utils.py +9 -6
- langflow/utils/util.py +5 -5
- langflow/utils/validate.py +3 -3
- langflow/utils/voice_utils.py +2 -2
- {langflow_base_nightly-0.5.0.dev36.dist-info → langflow_base_nightly-0.5.0.dev38.dist-info}/METADATA +2 -1
- {langflow_base_nightly-0.5.0.dev36.dist-info → langflow_base_nightly-0.5.0.dev38.dist-info}/RECORD +342 -340
- langflow/frontend/assets/lazyIconImports-Bh1TFfvH.js +0 -2
- {langflow_base_nightly-0.5.0.dev36.dist-info → langflow_base_nightly-0.5.0.dev38.dist-info}/WHEEL +0 -0
- {langflow_base_nightly-0.5.0.dev36.dist-info → langflow_base_nightly-0.5.0.dev38.dist-info}/entry_points.txt +0 -0
langflow/components/docling/__init__.py CHANGED
@@ -1,13 +1,7 @@
 from __future__ import annotations
 
-import signal
-import sys
-import traceback
-from contextlib import suppress
 from typing import TYPE_CHECKING, Any
 
-from loguru import logger
-
 from langflow.components._importing import import_mod
 
 if TYPE_CHECKING:
@@ -47,195 +41,3 @@ def __getattr__(attr_name: str) -> Any:
 
 def __dir__() -> list[str]:
     return list(__all__)
-
-
-def docling_worker(file_paths: list[str], queue, pipeline: str, ocr_engine: str):
-    """Worker function for processing files with Docling in a separate process."""
-    # Signal handling for graceful shutdown
-    shutdown_requested = False
-
-    def signal_handler(signum: int, frame) -> None:  # noqa: ARG001
-        """Handle shutdown signals gracefully."""
-        nonlocal shutdown_requested
-        signal_names: dict[int, str] = {signal.SIGTERM: "SIGTERM", signal.SIGINT: "SIGINT"}
-        signal_name = signal_names.get(signum, f"signal {signum}")
-
-        logger.debug(f"Docling worker received {signal_name}, initiating graceful shutdown...")
-        shutdown_requested = True
-
-        # Send shutdown notification to parent process
-        with suppress(Exception):
-            queue.put({"error": f"Worker interrupted by {signal_name}", "shutdown": True})
-
-        # Exit gracefully
-        sys.exit(0)
-
-    def check_shutdown() -> None:
-        """Check if shutdown was requested and exit if so."""
-        if shutdown_requested:
-            logger.info("Shutdown requested, exiting worker...")
-
-            with suppress(Exception):
-                queue.put({"error": "Worker shutdown requested", "shutdown": True})
-
-            sys.exit(0)
-
-    # Register signal handlers early
-    try:
-        signal.signal(signal.SIGTERM, signal_handler)
-        signal.signal(signal.SIGINT, signal_handler)
-        logger.debug("Signal handlers registered for graceful shutdown")
-    except (OSError, ValueError) as e:
-        # Some signals might not be available on all platforms
-        logger.warning(f"Warning: Could not register signal handlers: {e}")
-
-    # Check for shutdown before heavy imports
-    check_shutdown()
-
-    try:
-        from docling.datamodel.base_models import ConversionStatus, InputFormat
-        from docling.datamodel.pipeline_options import (
-            OcrOptions,
-            PdfPipelineOptions,
-            VlmPipelineOptions,
-        )
-        from docling.document_converter import DocumentConverter, FormatOption, PdfFormatOption
-        from docling.models.factories import get_ocr_factory
-        from docling.pipeline.vlm_pipeline import VlmPipeline
-
-        # Check for shutdown after imports
-        check_shutdown()
-        logger.debug("Docling dependencies loaded successfully")
-
-    except ModuleNotFoundError:
-        msg = (
-            "Docling is an optional dependency of Langflow. "
-            "Install with `uv pip install 'langflow[docling]'` "
-            "or refer to the documentation"
-        )
-        queue.put({"error": msg})
-        return
-    except ImportError as e:
-        # A different import failed (e.g., a transitive dependency); preserve details.
-        queue.put({"error": f"Failed to import a Docling dependency: {e}"})
-        return
-    except KeyboardInterrupt:
-        logger.warning("KeyboardInterrupt during imports, exiting...")
-        queue.put({"error": "Worker interrupted during imports", "shutdown": True})
-        return
-
-    # Configure the standard PDF pipeline
-    def _get_standard_opts() -> PdfPipelineOptions:
-        check_shutdown()  # Check before heavy operations
-
-        pipeline_options = PdfPipelineOptions()
-        pipeline_options.do_ocr = ocr_engine != ""
-        if pipeline_options.do_ocr:
-            ocr_factory = get_ocr_factory(
-                allow_external_plugins=False,
-            )
-
-            ocr_options: OcrOptions = ocr_factory.create_options(
-                kind=ocr_engine,
-            )
-            pipeline_options.ocr_options = ocr_options
-        return pipeline_options
-
-    # Configure the VLM pipeline
-    def _get_vlm_opts() -> VlmPipelineOptions:
-        check_shutdown()  # Check before heavy operations
-        return VlmPipelineOptions()
-
-    # Configure the main format options and create the DocumentConverter()
-    def _get_converter() -> DocumentConverter:
-        check_shutdown()  # Check before heavy operations
-
-        if pipeline == "standard":
-            pdf_format_option = PdfFormatOption(
-                pipeline_options=_get_standard_opts(),
-            )
-        elif pipeline == "vlm":
-            pdf_format_option = PdfFormatOption(pipeline_cls=VlmPipeline, pipeline_options=_get_vlm_opts())
-        else:
-            msg = f"Unknown pipeline: {pipeline!r}"
-            raise ValueError(msg)
-
-        format_options: dict[InputFormat, FormatOption] = {
-            InputFormat.PDF: pdf_format_option,
-            InputFormat.IMAGE: pdf_format_option,
-        }
-
-        return DocumentConverter(format_options=format_options)
-
-    try:
-        # Check for shutdown before creating converter (can be slow)
-        check_shutdown()
-        logger.info(f"Initializing {pipeline} pipeline with OCR: {ocr_engine or 'disabled'}")
-
-        converter = _get_converter()
-
-        # Check for shutdown before processing files
-        check_shutdown()
-        logger.info(f"Starting to process {len(file_paths)} files...")
-
-        # Process files with periodic shutdown checks
-        results = []
-        for i, file_path in enumerate(file_paths):
-            # Check for shutdown before processing each file
-            check_shutdown()
-
-            logger.debug(f"Processing file {i + 1}/{len(file_paths)}: {file_path}")
-
-            try:
-                # Process single file (we can't easily interrupt convert_all)
-                single_result = converter.convert_all([file_path])
-                results.extend(single_result)
-
-                # Check for shutdown after each file
-                check_shutdown()
-
-            except (OSError, ValueError, RuntimeError, ImportError) as file_error:
-                # Handle specific file processing errors
-                logger.error(f"Error processing file {file_path}: {file_error}")
-                # Continue with other files, but check for shutdown
-                check_shutdown()
-            except Exception as file_error:  # noqa: BLE001
-                # Catch any other unexpected errors to prevent worker crash
-                logger.error(f"Unexpected error processing file {file_path}: {file_error}")
-                # Continue with other files, but check for shutdown
-                check_shutdown()
-
-        # Final shutdown check before sending results
-        check_shutdown()
-
-        # Process the results while maintaining the original structure
-        processed_data = [
-            {"document": res.document, "file_path": str(res.input.file), "status": res.status.name}
-            if res.status == ConversionStatus.SUCCESS
-            else None
-            for res in results
-        ]
-
-        logger.info(f"Successfully processed {len([d for d in processed_data if d])} files")
-        queue.put(processed_data)
-
-    except KeyboardInterrupt:
-        logger.warning("KeyboardInterrupt during processing, exiting gracefully...")
-        queue.put({"error": "Worker interrupted during processing", "shutdown": True})
-        return
-    except Exception as e:  # noqa: BLE001
-        if shutdown_requested:
-            logger.exception("Exception occurred during shutdown, exiting...")
-            return
-
-        # Send any processing error to the main process with traceback
-        error_info = {"error": str(e), "traceback": traceback.format_exc()}
-        logger.error(f"Error in worker: {error_info}")
-        queue.put(error_info)
-    finally:
-        logger.info("Docling worker finishing...")
-        # Ensure we don't leave any hanging processes
-        if shutdown_requested:
-            logger.debug("Worker shutdown completed")
-        else:
-            logger.debug("Worker completed normally")
langflow/components/docling/docling_inline.py CHANGED
@@ -3,7 +3,7 @@ from multiprocessing import Queue, get_context
 from queue import Empty
 
 from langflow.base.data import BaseFileComponent
-from langflow.
+from langflow.base.data.docling_utils import docling_worker
 from langflow.inputs import DropdownInput
 from langflow.schema import Data
 
langflow/components/embeddings/text_embedder.py CHANGED
@@ -1,8 +1,8 @@
-import logging
 from typing import TYPE_CHECKING
 
 from langflow.custom.custom_component.component import Component
 from langflow.io import HandleInput, MessageInput, Output
+from langflow.logging.logger import logger
 from langflow.schema.data import Data
 
 if TYPE_CHECKING:
@@ -57,8 +57,8 @@ class TextEmbedderComponent(Component):
             embedding_vector = embeddings[0]
             self.status = {"text": text_content, "embeddings": embedding_vector}
             return Data(data={"text": text_content, "embeddings": embedding_vector})
-        except Exception as e:
-
+        except Exception as e:  # noqa: BLE001
+            logger.exception("Error generating embeddings")
             error_data = Data(data={"text": "", "embeddings": [], "error": str(e)})
             self.status = {"error": str(e)}
             return error_data
langflow/components/firecrawl/firecrawl_extract_api.py CHANGED
@@ -1,13 +1,6 @@
-from loguru import logger
-
 from langflow.custom.custom_component.component import Component
-from langflow.io import
-
-    DataInput,
-    MultilineInput,
-    Output,
-    SecretStrInput,
-)
+from langflow.io import BoolInput, DataInput, MultilineInput, Output, SecretStrInput
+from langflow.logging.logger import logger
 from langflow.schema.data import Data
 
 
langflow/components/google/gmail.py CHANGED
@@ -11,11 +11,11 @@ from googleapiclient.discovery import build
 from langchain_core.chat_sessions import ChatSession
 from langchain_core.messages import HumanMessage
 from langchain_google_community.gmail.loader import GMailLoader
-from loguru import logger
 
 from langflow.custom.custom_component.component import Component
 from langflow.inputs.inputs import MessageTextInput
 from langflow.io import SecretStrInput
+from langflow.logging.logger import logger
 from langflow.schema.data import Data
 from langflow.template.field.base import Output
 
langflow/components/google/google_generative_ai.py CHANGED
@@ -1,21 +1,14 @@
 from typing import Any
 
 import requests
-from loguru import logger
 from pydantic.v1 import SecretStr
 
 from langflow.base.models.google_generative_ai_constants import GOOGLE_GENERATIVE_AI_MODELS
 from langflow.base.models.model import LCModelComponent
 from langflow.field_typing import LanguageModel
 from langflow.field_typing.range_spec import RangeSpec
-from langflow.inputs.inputs import
-
-    DropdownInput,
-    FloatInput,
-    IntInput,
-    SecretStrInput,
-    SliderInput,
-)
+from langflow.inputs.inputs import BoolInput, DropdownInput, FloatInput, IntInput, SecretStrInput, SliderInput
+from langflow.logging.logger import logger
 from langflow.schema.dotdict import dotdict
 
 
@@ -105,7 +98,7 @@ class GoogleGenerativeAIComponent(LCModelComponent):
             google_api_key=SecretStr(google_api_key).get_secret_value(),
         )
 
-    def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:
+    def get_models(self, *, tool_model_enabled: bool | None = None) -> list[str]:
         try:
             import google.generativeai as genai
 
@@ -145,8 +138,9 @@ class GoogleGenerativeAIComponent(LCModelComponent):
        except (ImportError, ValueError, requests.exceptions.RequestException) as e:
            logger.exception(f"Error getting model names: {e}")
            ids = GOOGLE_GENERATIVE_AI_MODELS
+           build_config.setdefault("model_name", {})
            build_config["model_name"]["options"] = ids
-           build_config["model_name"]
+           build_config["model_name"].setdefault("value", ids[0])
        except Exception as e:
            msg = f"Error getting model names: {e}"
            raise ValueError(msg) from e
langflow/components/groq/groq.py CHANGED
@@ -1,5 +1,4 @@
 import requests
-from loguru import logger
 from pydantic.v1 import SecretStr
 
 from langflow.base.models.groq_constants import (
@@ -11,6 +10,7 @@ from langflow.base.models.model import LCModelComponent
 from langflow.field_typing import LanguageModel
 from langflow.field_typing.range_spec import RangeSpec
 from langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput
+from langflow.logging.logger import logger
 
 
 class GroqModel(LCModelComponent):
@@ -74,7 +74,7 @@
         ),
     ]
 
-    def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:
+    def get_models(self, *, tool_model_enabled: bool | None = None) -> list[str]:
         try:
             url = f"{self.base_url}/openai/v1/models"
             headers = {"Authorization": f"Bearer {self.api_key}", "Content-Type": "application/json"}
@@ -114,8 +114,9 @@
        except (ImportError, ValueError, requests.exceptions.RequestException) as e:
            logger.exception(f"Error getting model names: {e}")
            ids = GROQ_MODELS
+           build_config.setdefault("model_name", {})
            build_config["model_name"]["options"] = ids
-           build_config["model_name"]
+           build_config["model_name"].setdefault("value", ids[0])
        except Exception as e:
            msg = f"Error getting model names: {e}"
            raise ValueError(msg) from e
langflow/components/helpers/current_date.py CHANGED
@@ -1,10 +1,9 @@
 from datetime import datetime
 from zoneinfo import ZoneInfo, available_timezones
 
-from loguru import logger
-
 from langflow.custom.custom_component.component import Component
 from langflow.io import DropdownInput, Output
+from langflow.logging.logger import logger
 from langflow.schema.message import Message
 
 
@@ -37,7 +36,7 @@ class CurrentDateComponent(Component):
             self.status = result
             return Message(text=result)
         except Exception as e:  # noqa: BLE001
-            logger.
+            logger.debug("Error getting current date", exc_info=True)
             error_message = f"Error: {e}"
             self.status = error_message
             return Message(text=error_message)
langflow/components/helpers/memory.py CHANGED
@@ -220,7 +220,7 @@
         stored = stored[-n_messages:] if order == "ASC" else stored[:n_messages]
 
         # self.status = stored
-        return cast(Data, stored)
+        return cast("Data", stored)
 
     async def retrieve_messages_as_text(self) -> Message:
         stored_text = data_to_text(self.template, await self.retrieve_messages())
langflow/components/ibm/watsonx.py CHANGED
@@ -3,13 +3,13 @@ from typing import Any
 
 import requests
 from langchain_ibm import ChatWatsonx
-from loguru import logger
 from pydantic.v1 import SecretStr
 
 from langflow.base.models.model import LCModelComponent
 from langflow.field_typing import LanguageModel
 from langflow.field_typing.range_spec import RangeSpec
 from langflow.inputs.inputs import BoolInput, DropdownInput, IntInput, SecretStrInput, SliderInput, StrInput
+from langflow.logging.logger import logger
 from langflow.schema.dotdict import dotdict
 
 
langflow/components/ibm/watsonx_embeddings.py CHANGED
@@ -4,12 +4,12 @@ import requests
 from ibm_watsonx_ai import APIClient, Credentials
 from ibm_watsonx_ai.metanames import EmbedTextParamsMetaNames
 from langchain_ibm import WatsonxEmbeddings
-from loguru import logger
 from pydantic.v1 import SecretStr
 
 from langflow.base.embeddings.model import LCEmbeddingsModel
 from langflow.field_typing import Embeddings
 from langflow.io import BoolInput, DropdownInput, IntInput, SecretStrInput, StrInput
+from langflow.logging.logger import logger
 from langflow.schema.dotdict import dotdict
 
 
langflow/components/langwatch/langwatch.py CHANGED
@@ -3,7 +3,6 @@ import os
 from typing import Any
 
 import httpx
-from loguru import logger
 
 from langflow.base.langwatch.utils import get_cached_evaluators
 from langflow.custom.custom_component.component import Component
@@ -18,6 +17,7 @@ from langflow.io import (
     Output,
     SecretStrInput,
 )
+from langflow.logging.logger import logger
 from langflow.schema.data import Data
 from langflow.schema.dotdict import dotdict
 
@@ -226,7 +226,7 @@ class LangWatchComponent(Component):
         if not evaluator_name:
             if self.evaluators:
                 evaluator_name = next(iter(self.evaluators))
-                logger.
+                await logger.ainfo(f"No evaluator was selected. Using default: {evaluator_name}")
             else:
                 return Data(
                     data={"error": "No evaluator selected and no evaluators available. Please choose an evaluator."}
@@ -237,7 +237,7 @@ class LangWatchComponent(Component):
         if not evaluator:
             return Data(data={"error": f"Selected evaluator '{evaluator_name}' not found."})
 
-        logger.
+        await logger.ainfo(f"Evaluating with evaluator: {evaluator_name}")
 
         endpoint = f"/api/evaluations/{evaluator_name}/evaluate"
         url = f"{os.getenv('LANGWATCH_ENDPOINT', 'https://app.langwatch.ai')}{endpoint}"
langflow/components/logic/flow_tool.py CHANGED
@@ -1,6 +1,5 @@
 from typing import Any
 
-from loguru import logger
 from typing_extensions import override
 
 from langflow.base.langchain_utilities.model import LCToolComponent
@@ -9,6 +8,7 @@ from langflow.field_typing import Tool
 from langflow.graph.graph.base import Graph
 from langflow.helpers.flow import get_flow_inputs
 from langflow.io import BoolInput, DropdownInput, Output, StrInput
+from langflow.logging.logger import logger
 from langflow.schema.data import Data
 from langflow.schema.dotdict import dotdict
 
@@ -91,7 +91,7 @@ class FlowToolComponent(LCToolComponent):
        try:
            graph.set_run_id(self.graph.run_id)
        except Exception:  # noqa: BLE001
-           logger.
+           logger.warning("Failed to set run_id", exc_info=True)
        inputs = get_flow_inputs(graph)
        tool_description = self.tool_description.strip() or flow_data.description
        tool = FlowTool(
langflow/components/logic/run_flow.py CHANGED
@@ -1,9 +1,8 @@
 from typing import Any
 
-from loguru import logger
-
 from langflow.base.tools.run_flow import RunFlowBaseComponent
 from langflow.helpers.flow import run_flow
+from langflow.logging.logger import logger
 from langflow.schema.dotdict import dotdict
 
 
@@ -34,7 +33,7 @@ class RunFlowComponent(RunFlowBaseComponent):
                build_config = self.update_build_config_from_graph(build_config, graph)
            except Exception as e:
                msg = f"Error building graph for flow {field_value}"
-               logger.
+               await logger.aexception(msg)
                raise RuntimeError(msg) from e
        return build_config
 
langflow/components/logic/sub_flow.py CHANGED
@@ -1,13 +1,12 @@
 from typing import Any
 
-from loguru import logger
-
 from langflow.base.flow_processing.utils import build_data_from_result_data
 from langflow.custom.custom_component.component import Component
 from langflow.graph.graph.base import Graph
 from langflow.graph.vertex.base import Vertex
 from langflow.helpers.flow import get_flow_inputs
 from langflow.io import DropdownInput, Output
+from langflow.logging.logger import logger
 from langflow.schema.data import Data
 from langflow.schema.dotdict import dotdict
 
@@ -41,11 +40,11 @@ class SubFlowComponent(Component):
            try:
                flow_data = await self.get_flow(field_value)
            except Exception:  # noqa: BLE001
-               logger.
+               await logger.aexception(f"Error getting flow {field_value}")
            else:
                if not flow_data:
                    msg = f"Flow {field_value} not found."
-                   logger.
+                   await logger.aerror(msg)
                else:
                    try:
                        graph = Graph.from_payload(flow_data.data["data"])
@@ -54,7 +53,7 @@ class SubFlowComponent(Component):
                        # Add inputs to the build config
                        build_config = self.add_inputs_to_build_config(inputs, build_config)
                    except Exception:  # noqa: BLE001
-                       logger.
+                       await logger.aexception(f"Error building graph for flow {field_value}")
 
        return build_config
 
langflow/components/mem0/mem0_chat_memory.py CHANGED
@@ -1,17 +1,11 @@
 import os
 
-from loguru import logger
 from mem0 import Memory, MemoryClient
 
 from langflow.base.memory.model import LCChatMemoryComponent
-from langflow.inputs.inputs import
-    DictInput,
-    HandleInput,
-    MessageTextInput,
-    NestedDictInput,
-    SecretStrInput,
-)
+from langflow.inputs.inputs import DictInput, HandleInput, MessageTextInput, NestedDictInput, SecretStrInput
 from langflow.io import Output
+from langflow.logging.logger import logger
 from langflow.schema.data import Data
 
 
langflow/components/nvidia/nvidia.py CHANGED
@@ -1,6 +1,5 @@
 from typing import Any
 
-from loguru import logger
 from requests.exceptions import ConnectionError  # noqa: A004
 from urllib3.exceptions import MaxRetryError, NameResolutionError
 
@@ -8,6 +7,7 @@ from langflow.base.models.model import LCModelComponent
 from langflow.field_typing import LanguageModel
 from langflow.field_typing.range_spec import RangeSpec
 from langflow.inputs.inputs import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput
+from langflow.logging.logger import logger
 from langflow.schema.dotdict import dotdict
 
 
@@ -97,7 +97,7 @@ class NVIDIAModelComponent(LCModelComponent):
         ),
     ]
 
-    def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:
+    def get_models(self, *, tool_model_enabled: bool | None = None) -> list[str]:
         try:
             from langchain_nvidia_ai_endpoints import ChatNVIDIA
         except ImportError as e:
@@ -114,7 +114,7 @@ class NVIDIAModelComponent(LCModelComponent):
     def update_build_config(self, build_config: dotdict, _field_value: Any, field_name: str | None = None):
         if field_name in {"model_name", "tool_model_enabled", "base_url", "api_key"}:
             try:
-                ids = self.get_models(self.tool_model_enabled)
+                ids = self.get_models(tool_model_enabled=self.tool_model_enabled)
                 build_config["model_name"]["options"] = ids
 
                 if "value" not in build_config["model_name"] or build_config["model_name"]["value"] is None:
langflow/components/olivya/olivya.py CHANGED
@@ -1,10 +1,10 @@
 import json
 
 import httpx
-from loguru import logger
 
 from langflow.custom.custom_component.component import Component
 from langflow.io import MessageTextInput, Output
+from langflow.logging.logger import logger
 from langflow.schema.data import Data
 
 
@@ -83,7 +83,7 @@ class OlivyaComponent(Component):
                "Content-Type": "application/json",
            }
 
-           logger.
+           await logger.ainfo("Sending POST request with payload: %s", payload)
 
            # Send the POST request with a timeout
            async with httpx.AsyncClient() as client:
@@ -97,19 +97,19 @@ class OlivyaComponent(Component):
 
                # Parse and return the successful response
                response_data = response.json()
-               logger.
+               await logger.ainfo("Request successful: %s", response_data)
 
        except httpx.HTTPStatusError as http_err:
-           logger.
+           await logger.aexception("HTTP error occurred")
            response_data = {"error": f"HTTP error occurred: {http_err}", "response_text": response.text}
        except httpx.RequestError as req_err:
-           logger.
+           await logger.aexception("Request failed")
            response_data = {"error": f"Request failed: {req_err}"}
        except json.JSONDecodeError as json_err:
-           logger.
+           await logger.aexception("Response parsing failed")
            response_data = {"error": f"Response parsing failed: {json_err}", "raw_response": response.text}
        except Exception as e:  # noqa: BLE001
-           logger.
+           await logger.aexception("An unexpected error occurred")
            response_data = {"error": f"An unexpected error occurred: {e!s}"}
 
        # Return the response as part of the output