langflow-base-nightly 0.5.0.dev36__py3-none-any.whl → 0.5.0.dev38__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- langflow/__main__.py +1 -1
- langflow/alembic/versions/4e5980a44eaa_fix_date_times_again.py +24 -30
- langflow/alembic/versions/58b28437a398_modify_nullable.py +6 -6
- langflow/alembic/versions/79e675cb6752_change_datetime_type.py +24 -30
- langflow/alembic/versions/b2fa308044b5_add_unique_constraints.py +12 -13
- langflow/api/build.py +21 -26
- langflow/api/health_check_router.py +3 -3
- langflow/api/utils.py +3 -3
- langflow/api/v1/callback.py +2 -2
- langflow/api/v1/chat.py +19 -31
- langflow/api/v1/endpoints.py +10 -10
- langflow/api/v1/flows.py +1 -1
- langflow/api/v1/knowledge_bases.py +19 -12
- langflow/api/v1/mcp.py +12 -12
- langflow/api/v1/mcp_projects.py +45 -81
- langflow/api/v1/mcp_utils.py +8 -8
- langflow/api/v1/schemas.py +1 -5
- langflow/api/v1/store.py +1 -1
- langflow/api/v1/validate.py +2 -2
- langflow/api/v1/voice_mode.py +58 -62
- langflow/api/v2/files.py +5 -3
- langflow/api/v2/mcp.py +10 -9
- langflow/base/composio/composio_base.py +21 -2
- langflow/base/data/docling_utils.py +194 -0
- langflow/base/data/kb_utils.py +33 -0
- langflow/base/embeddings/aiml_embeddings.py +1 -1
- langflow/base/flow_processing/utils.py +1 -2
- langflow/base/io/__init__.py +0 -1
- langflow/base/langwatch/utils.py +2 -1
- langflow/base/mcp/util.py +49 -47
- langflow/base/models/model.py +3 -3
- langflow/base/prompts/api_utils.py +1 -1
- langflow/base/tools/flow_tool.py +2 -2
- langflow/base/tools/run_flow.py +2 -6
- langflow/components/Notion/add_content_to_page.py +2 -2
- langflow/components/Notion/list_database_properties.py +2 -2
- langflow/components/Notion/list_pages.py +2 -2
- langflow/components/Notion/page_content_viewer.py +2 -2
- langflow/components/Notion/update_page_property.py +1 -1
- langflow/components/agentql/agentql_api.py +2 -10
- langflow/components/agents/agent.py +3 -3
- langflow/components/agents/mcp_component.py +54 -69
- langflow/components/anthropic/anthropic.py +5 -4
- langflow/components/assemblyai/assemblyai_get_subtitles.py +2 -2
- langflow/components/assemblyai/assemblyai_lemur.py +2 -2
- langflow/components/assemblyai/assemblyai_list_transcripts.py +2 -2
- langflow/components/assemblyai/assemblyai_poll_transcript.py +2 -2
- langflow/components/assemblyai/assemblyai_start_transcript.py +2 -2
- langflow/components/data/file.py +575 -55
- langflow/components/data/kb_ingest.py +116 -43
- langflow/components/data/kb_retrieval.py +24 -26
- langflow/components/data/url.py +1 -1
- langflow/components/datastax/astra_assistant_manager.py +3 -3
- langflow/components/datastax/create_assistant.py +1 -2
- langflow/components/deactivated/merge_data.py +1 -2
- langflow/components/deactivated/sub_flow.py +6 -7
- langflow/components/deactivated/vectara_self_query.py +3 -3
- langflow/components/docling/__init__.py +0 -198
- langflow/components/docling/docling_inline.py +1 -1
- langflow/components/embeddings/text_embedder.py +3 -3
- langflow/components/firecrawl/firecrawl_extract_api.py +2 -9
- langflow/components/google/gmail.py +1 -1
- langflow/components/google/google_generative_ai.py +5 -11
- langflow/components/groq/groq.py +4 -3
- langflow/components/helpers/current_date.py +2 -3
- langflow/components/helpers/memory.py +1 -1
- langflow/components/ibm/watsonx.py +1 -1
- langflow/components/ibm/watsonx_embeddings.py +1 -1
- langflow/components/langwatch/langwatch.py +3 -3
- langflow/components/logic/flow_tool.py +2 -2
- langflow/components/logic/notify.py +1 -1
- langflow/components/logic/run_flow.py +2 -3
- langflow/components/logic/sub_flow.py +4 -5
- langflow/components/mem0/mem0_chat_memory.py +2 -8
- langflow/components/nvidia/nvidia.py +3 -3
- langflow/components/olivya/olivya.py +7 -7
- langflow/components/ollama/ollama.py +8 -6
- langflow/components/processing/batch_run.py +8 -8
- langflow/components/processing/data_operations.py +2 -2
- langflow/components/processing/merge_data.py +1 -2
- langflow/components/processing/message_to_data.py +2 -3
- langflow/components/processing/parse_json_data.py +1 -1
- langflow/components/processing/save_file.py +6 -32
- langflow/components/prototypes/python_function.py +2 -3
- langflow/components/serpapi/serp.py +1 -1
- langflow/components/tavily/tavily_extract.py +1 -1
- langflow/components/tavily/tavily_search.py +1 -1
- langflow/components/tools/calculator.py +2 -2
- langflow/components/tools/python_code_structured_tool.py +3 -10
- langflow/components/tools/python_repl.py +2 -2
- langflow/components/tools/searxng.py +3 -3
- langflow/components/tools/serp_api.py +2 -2
- langflow/components/tools/tavily_search_tool.py +2 -2
- langflow/components/tools/yahoo_finance.py +1 -1
- langflow/components/twelvelabs/video_embeddings.py +4 -4
- langflow/components/vectorstores/astradb.py +30 -19
- langflow/components/vectorstores/local_db.py +1 -1
- langflow/components/yahoosearch/yahoo.py +1 -1
- langflow/components/youtube/trending.py +3 -4
- langflow/custom/attributes.py +2 -1
- langflow/custom/code_parser/code_parser.py +1 -1
- langflow/custom/custom_component/base_component.py +1 -1
- langflow/custom/custom_component/component.py +16 -2
- langflow/custom/directory_reader/directory_reader.py +7 -7
- langflow/custom/directory_reader/utils.py +1 -2
- langflow/custom/utils.py +30 -30
- langflow/events/event_manager.py +1 -1
- langflow/frontend/assets/{SlackIcon-B260Qg_R.js → SlackIcon-BhW6H3JR.js} +1 -1
- langflow/frontend/assets/{Wikipedia-BB2mbgyd.js → Wikipedia-Dx5jbiy3.js} +1 -1
- langflow/frontend/assets/{Wolfram-DytXC9hF.js → Wolfram-CIyonzwo.js} +1 -1
- langflow/frontend/assets/{index-DPX6X_bw.js → index-0XQqYgdG.js} +1 -1
- langflow/frontend/assets/{index-DtJyCbzF.js → index-1Q3VBqKn.js} +1 -1
- langflow/frontend/assets/{index-DztLFiip.js → index-35sspuLu.js} +1 -1
- langflow/frontend/assets/{index-BeNby7qF.js → index-7hzXChQz.js} +1 -1
- langflow/frontend/assets/{index-BOEf7-ty.js → index-8cuhogZP.js} +1 -1
- langflow/frontend/assets/{index-D0s9f6Re.js → index-B0m53xKd.js} +1 -1
- langflow/frontend/assets/{index-DpJiH-Rk.js → index-B1XqWJhG.js} +1 -1
- langflow/frontend/assets/{index-DuAeoC-H.js → index-B3KCdQ91.js} +1 -1
- langflow/frontend/assets/{index-Bxml6wXu.js → index-B7uEuOPK.js} +1 -1
- langflow/frontend/assets/{index-CDFLVFB4.js → index-B8UR8v-Q.js} +1 -1
- langflow/frontend/assets/{index-ci4XHjbJ.js → index-BD7Io1hL.js} +6 -6
- langflow/frontend/assets/{index-DasrI03Y.js → index-BDQrd7Tj.js} +1 -1
- langflow/frontend/assets/{index-CkQ-bJ4G.js → index-BDuk0d7P.js} +1 -1
- langflow/frontend/assets/{index-C_1RBTul.js → index-BFQ8KFK0.js} +1 -1
- langflow/frontend/assets/{index-DqSH4x-R.js → index-BFf0HTFI.js} +1 -1
- langflow/frontend/assets/{index-BXMhmvTj.js → index-BHhnpSkW.js} +1 -1
- langflow/frontend/assets/{index-Uq2ij_SS.js → index-BKKrUElc.js} +1 -1
- langflow/frontend/assets/{index-3TJWUdmx.js → index-BKeZt2hQ.js} +1 -1
- langflow/frontend/assets/{index-DHlEwAxb.js → index-BKlQbl-6.js} +1 -1
- langflow/frontend/assets/{index-Bisa4IQF.js → index-BLYw9MK2.js} +1 -1
- langflow/frontend/assets/{index-GODbXlHC.js → index-BLsVo9iW.js} +1 -1
- langflow/frontend/assets/{index-CHFO5O4g.js → index-BNQIbda3.js} +1 -1
- langflow/frontend/assets/{index-3uOAA_XX.js → index-BPR2mEFC.js} +1 -1
- langflow/frontend/assets/{index-3qMh9x6K.js → index-BPfdqCc_.js} +1 -1
- langflow/frontend/assets/{index-rcdQpNcU.js → index-BQrVDjR1.js} +1 -1
- langflow/frontend/assets/{index-4eRtaV45.js → index-BRmSeoWR.js} +1 -1
- langflow/frontend/assets/{index-Ct9_T9ox.js → index-BUse-kxM.js} +1 -1
- langflow/frontend/assets/{index-BdYgKk1d.js → index-BVFaF7HW.js} +1 -1
- langflow/frontend/assets/{index-CWWo2zOA.js → index-BWgIWfv2.js} +1 -1
- langflow/frontend/assets/{index-Du9aJK7m.js → index-BWt5xGeA.js} +1 -1
- langflow/frontend/assets/{index-Baka5dKE.js → index-BYhcGLTV.js} +1 -1
- langflow/frontend/assets/{index-BWq9GTzt.js → index-BYjw7Gk3.js} +1 -1
- langflow/frontend/assets/{index-r1LZg-PY.js → index-BZFljdMa.js} +1 -1
- langflow/frontend/assets/index-BcAgItH4.js +1 -0
- langflow/frontend/assets/{index-B8TlNgn-.js → index-Bct1s6__.js} +1 -1
- langflow/frontend/assets/{index-DZzbmg3J.js → index-Bhv79Zso.js} +1 -1
- langflow/frontend/assets/{index-CqDUqHfd.js → index-Bj3lSwvZ.js} +1 -1
- langflow/frontend/assets/{index-dkS0ek2S.js → index-Bk4mTwnI.js} +1 -1
- langflow/frontend/assets/{index-tOy_uloT.js → index-BmIx1cws.js} +1 -1
- langflow/frontend/assets/{index-BVtf6m9S.js → index-BmYJJ5YS.js} +1 -1
- langflow/frontend/assets/{index-mBjJYD9q.js → index-BnAFhkSN.js} +1 -1
- langflow/frontend/assets/{index-Ba3RTMXI.js → index-Bo-ww0Bb.js} +1 -1
- langflow/frontend/assets/{index-BsBWP-Dh.js → index-BpmqDOeZ.js} +1 -1
- langflow/frontend/assets/{index-BqUeOc7Y.js → index-BrVhdPZb.js} +1 -1
- langflow/frontend/assets/{index-DWkMJnbd.js → index-BvGQfVBD.js} +1 -1
- langflow/frontend/assets/{index-DdzVmJHE.js → index-Bwi4flFg.js} +1 -1
- langflow/frontend/assets/{index-Ccb5B8zG.js → index-BzoRPtTY.js} +1 -1
- langflow/frontend/assets/{index-Ym6gz0T6.js → index-C--IDAyc.js} +1 -1
- langflow/frontend/assets/{index-CvQ0w8Pj.js → index-C0E3_MIK.js} +1 -1
- langflow/frontend/assets/{index-DxIs8VSp.js → index-C27Jj_26.js} +1 -1
- langflow/frontend/assets/{index-BxWXWRmZ.js → index-C2eQmQsn.js} +1 -1
- langflow/frontend/assets/{index-B536IPXH.js → index-C8K0r39B.js} +1 -1
- langflow/frontend/assets/{index-BEDxAk3N.js → index-CEJNWPhA.js} +1 -1
- langflow/frontend/assets/{index-G_U_kPAd.js → index-CFNTYfFK.js} +1 -1
- langflow/frontend/assets/{index-CMGZGIx_.js → index-CMHpjHZl.js} +1 -1
- langflow/frontend/assets/{index-C76aBV_h.js → index-CSu8KHOi.js} +1 -1
- langflow/frontend/assets/{index-B-c82Fnu.js → index-CUKmGsI6.js} +1 -1
- langflow/frontend/assets/{index-DX7XsAcx.js → index-CWYiSeWV.js} +1 -1
- langflow/frontend/assets/{index-COL0eiWI.js → index-CY7_TBTC.js} +1 -1
- langflow/frontend/assets/{index-BlBl2tvQ.js → index-CbnWRlYY.js} +1 -1
- langflow/frontend/assets/{index-BQB-iDYl.js → index-CfPBgkqg.js} +1 -1
- langflow/frontend/assets/{index-DWr_zPkx.js → index-Cg53lrYh.js} +1 -1
- langflow/frontend/assets/{index-BcgB3rXH.js → index-CgU7KF4I.js} +1 -1
- langflow/frontend/assets/{index-CkSzjCqM.js → index-CgwykVGh.js} +1 -1
- langflow/frontend/assets/{index-BbsND1Qg.js → index-Ch5r0oW6.js} +1 -1
- langflow/frontend/assets/{index-AY5Dm2mG.js → index-CjsommIr.js} +1 -1
- langflow/frontend/assets/{index-BtJ2o21k.js → index-CkK25zZO.js} +1 -1
- langflow/frontend/assets/{index-BKvKC-12.js → index-CkjwSTSM.js} +1 -1
- langflow/frontend/assets/{index-BVHvIhT5.js → index-CmSFKgiD.js} +1 -1
- langflow/frontend/assets/{index-D-zkHcob.js → index-Cr5v2ave.js} +1 -1
- langflow/frontend/assets/{index-js8ceOaP.js → index-CrAF-31Y.js} +1 -1
- langflow/frontend/assets/{index-BNbWMmAV.js → index-CsLQiWNf.js} +1 -1
- langflow/frontend/assets/{index-VcXZzovW.js → index-CuCM7Wu7.js} +1 -1
- langflow/frontend/assets/{index-DzeIsaBm.js → index-Cxy9sEpy.js} +1 -1
- langflow/frontend/assets/{index-LrMzDsq9.js → index-CyP3py8K.js} +1 -1
- langflow/frontend/assets/{index-C8KD3LPb.js → index-CzHzeZuA.js} +1 -1
- langflow/frontend/assets/{index-DS1EgA10.js → index-D1oynC8a.js} +1 -1
- langflow/frontend/assets/{index-ByFXr9Iq.js → index-D4tjMhfY.js} +1 -1
- langflow/frontend/assets/{index-DyJDHm2D.js → index-D6CSIrp1.js} +1 -1
- langflow/frontend/assets/{index-DIqSyDVO.js → index-D9kwEzPB.js} +1 -1
- langflow/frontend/assets/{index-D5PeCofu.js → index-DDXsm8tz.js} +1 -1
- langflow/frontend/assets/{index-CJwYfDBz.js → index-DDhJVVel.js} +1 -1
- langflow/frontend/assets/{index-C7x9R_Yo.js → index-DH6o91_s.js} +1 -1
- langflow/frontend/assets/{index-DpQKtcXu.js → index-DHngW1k8.js} +1 -1
- langflow/frontend/assets/{index-VZnN0P6C.js → index-DIKUsGLF.js} +1 -1
- langflow/frontend/assets/{index-VHmUHUUU.js → index-DJESSNJi.js} +1 -1
- langflow/frontend/assets/{index-BdIWbCEL.js → index-DMCWDJOl.js} +1 -1
- langflow/frontend/assets/{index-DK8vNpXK.js → index-DOEvKC2X.js} +1 -1
- langflow/frontend/assets/{index-C7V5U9yH.js → index-DOQDkSoK.js} +1 -1
- langflow/frontend/assets/{index-D0HmkH0H.js → index-DXAfIEvs.js} +1 -1
- langflow/frontend/assets/{index-C9N80hP8.js → index-DZP_SaHb.js} +1 -1
- langflow/frontend/assets/{index-B2ggrBuR.js → index-DZxUIhWh.js} +1 -1
- langflow/frontend/assets/{index-DS9I4y48.js → index-Dda2u_yz.js} +1 -1
- langflow/frontend/assets/{index-BLROcaSz.js → index-Dg8N3NSO.js} +1 -1
- langflow/frontend/assets/{index-Dpz3oBf5.js → index-DkGhPNeA.js} +1 -1
- langflow/frontend/assets/{index-BnLT29qW.js → index-Dka_Rk4-.js} +1 -1
- langflow/frontend/assets/{index-B5ed-sAv.js → index-DljpLeCW.js} +1 -1
- langflow/frontend/assets/{index-Cx__T92e.js → index-DnVYJtVO.js} +1 -1
- langflow/frontend/assets/{index-hOkEW3JP.js → index-DqbzUcI5.js} +1 -1
- langflow/frontend/assets/{index-BxkZkBgQ.js → index-Dr6pVDPI.js} +1 -1
- langflow/frontend/assets/{index-BIkqesA-.js → index-DsoX2o1S.js} +1 -1
- langflow/frontend/assets/{index-Cpgkb0Q3.js → index-DwfHWnX7.js} +1 -1
- langflow/frontend/assets/{index-B9Mo3ndZ.js → index-Dx-Z87KT.js} +1 -1
- langflow/frontend/assets/{index-R7q8cAek.js → index-DyqITq51.js} +1 -1
- langflow/frontend/assets/{index-DKEXZFUO.js → index-DzIv3RyR.js} +1 -1
- langflow/frontend/assets/{index-BJrY2Fiu.js → index-G4ro0MjT.js} +1 -1
- langflow/frontend/assets/{index-IFGgPiye.js → index-H7J7w7fa.js} +1 -1
- langflow/frontend/assets/{index-lKEJpUsF.js → index-KWY77KfV.js} +1 -1
- langflow/frontend/assets/{index-DDNNv4C0.js → index-U9GWm1eH.js} +1 -1
- langflow/frontend/assets/{index-BRWNIt9F.js → index-Un9pWxnP.js} +1 -1
- langflow/frontend/assets/{index-BCK-ZyIh.js → index-Xi4TplbI.js} +1 -1
- langflow/frontend/assets/{index-BEKoRwsX.js → index-_cbGmjF4.js} +1 -1
- langflow/frontend/assets/{index-7xXgqu09.js → index-cEXY6V06.js} +1 -1
- langflow/frontend/assets/{index-D87Zw62M.js → index-dyXKnkMi.js} +1 -1
- langflow/frontend/assets/{index-CG7cp0nD.js → index-eUkS6iJM.js} +1 -1
- langflow/frontend/assets/{index-CoUlHbtg.js → index-ekfMOqrF.js} +1 -1
- langflow/frontend/assets/{index-DhzEUXfr.js → index-gdb7XMS8.js} +1 -1
- langflow/frontend/assets/{index-D9eflZfP.js → index-hZUcL0MZ.js} +1 -1
- langflow/frontend/assets/{index-CwIxqYlT.js → index-kkA-qHB_.js} +1 -1
- langflow/frontend/assets/{index-sS6XLk3j.js → index-mzl9ULw5.js} +1 -1
- langflow/frontend/assets/{index-BjENqyKe.js → index-oxHBZk2v.js} +1 -1
- langflow/frontend/assets/{index-BejHxU5W.js → index-p2kStSPe.js} +1 -1
- langflow/frontend/assets/{index-BOYTBrh9.js → index-paQEWYGT.js} +1 -1
- langflow/frontend/assets/{index-Cd5zuUUK.js → index-r_8gs4nL.js} +1 -1
- langflow/frontend/assets/{index-AlJ7td-D.js → index-uiKla4UR.js} +1 -1
- langflow/frontend/assets/{index-B8y58M9b.js → index-vJOO5U8M.js} +1 -1
- langflow/frontend/assets/{index-CF4dtI6S.js → index-w72fDjpG.js} +1 -1
- langflow/frontend/assets/{index-C2Xd7UkR.js → index-zV82kQ6k.js} +1 -1
- langflow/frontend/assets/lazyIconImports-DTNgvPE-.js +2 -0
- langflow/frontend/assets/{use-post-add-user-HN0rRnhv.js → use-post-add-user-CvtuazTg.js} +1 -1
- langflow/frontend/index.html +1 -1
- langflow/graph/edge/base.py +2 -3
- langflow/graph/graph/base.py +14 -12
- langflow/graph/graph/constants.py +3 -0
- langflow/graph/utils.py +6 -6
- langflow/graph/vertex/base.py +4 -5
- langflow/graph/vertex/param_handler.py +1 -1
- langflow/graph/vertex/vertex_types.py +2 -2
- langflow/helpers/flow.py +1 -1
- langflow/initial_setup/setup.py +32 -30
- langflow/initial_setup/starter_projects/Blog Writer.json +2 -2
- langflow/initial_setup/starter_projects/Custom Component Generator.json +2 -2
- langflow/initial_setup/starter_projects/Document Q&A.json +1 -1
- langflow/initial_setup/starter_projects/Hybrid Search RAG.json +2 -2
- langflow/initial_setup/starter_projects/Instagram Copywriter.json +3 -3
- langflow/initial_setup/starter_projects/Invoice Summarizer.json +1 -1
- langflow/initial_setup/starter_projects/Knowledge Ingestion.json +4 -4
- langflow/initial_setup/starter_projects/Knowledge Retrieval.json +2 -2
- langflow/initial_setup/starter_projects/Market Research.json +3 -3
- langflow/initial_setup/starter_projects/Meeting Summary.json +6 -6
- langflow/initial_setup/starter_projects/Memory Chatbot.json +2 -2
- langflow/initial_setup/starter_projects/News Aggregator.json +5 -22
- langflow/initial_setup/starter_projects/Nvidia Remix.json +3 -20
- langflow/initial_setup/starter_projects/Pokédex Agent.json +1 -1
- langflow/initial_setup/starter_projects/Portfolio Website Code Generator.json +1 -1
- langflow/initial_setup/starter_projects/Price Deal Finder.json +5 -5
- langflow/initial_setup/starter_projects/Research Agent.json +3 -3
- langflow/initial_setup/starter_projects/SaaS Pricing.json +1 -1
- langflow/initial_setup/starter_projects/Search agent.json +1 -1
- langflow/initial_setup/starter_projects/Sequential Tasks Agents.json +7 -7
- langflow/initial_setup/starter_projects/Simple Agent.json +3 -3
- langflow/initial_setup/starter_projects/Social Media Agent.json +1 -1
- langflow/initial_setup/starter_projects/Text Sentiment Analysis.json +1 -1
- langflow/initial_setup/starter_projects/Travel Planning Agents.json +3 -3
- langflow/initial_setup/starter_projects/Vector Store RAG.json +5 -5
- langflow/initial_setup/starter_projects/Youtube Analysis.json +3 -3
- langflow/interface/components.py +23 -22
- langflow/interface/initialize/loading.py +5 -5
- langflow/interface/run.py +1 -1
- langflow/interface/utils.py +1 -1
- langflow/io/__init__.py +0 -1
- langflow/langflow_launcher.py +1 -1
- langflow/load/load.py +2 -7
- langflow/logging/__init__.py +0 -1
- langflow/logging/logger.py +191 -115
- langflow/logging/setup.py +1 -1
- langflow/main.py +37 -52
- langflow/memory.py +7 -7
- langflow/middleware.py +1 -1
- langflow/processing/process.py +4 -4
- langflow/schema/artifact.py +2 -2
- langflow/schema/data.py +10 -2
- langflow/schema/dataframe.py +1 -1
- langflow/schema/message.py +1 -1
- langflow/serialization/serialization.py +1 -1
- langflow/services/auth/utils.py +2 -2
- langflow/services/cache/disk.py +1 -1
- langflow/services/cache/service.py +3 -3
- langflow/services/database/models/flow/model.py +2 -7
- langflow/services/database/models/transactions/crud.py +2 -2
- langflow/services/database/models/user/crud.py +2 -2
- langflow/services/database/service.py +8 -8
- langflow/services/database/utils.py +6 -5
- langflow/services/deps.py +2 -3
- langflow/services/factory.py +1 -1
- langflow/services/flow/flow_runner.py +7 -12
- langflow/services/job_queue/service.py +16 -15
- langflow/services/manager.py +3 -4
- langflow/services/settings/auth.py +1 -1
- langflow/services/settings/base.py +3 -8
- langflow/services/settings/manager.py +1 -1
- langflow/services/settings/utils.py +1 -1
- langflow/services/socket/__init__.py +0 -1
- langflow/services/socket/service.py +3 -3
- langflow/services/socket/utils.py +4 -4
- langflow/services/state/service.py +1 -2
- langflow/services/storage/factory.py +1 -1
- langflow/services/storage/local.py +9 -8
- langflow/services/storage/s3.py +11 -10
- langflow/services/store/service.py +3 -3
- langflow/services/store/utils.py +3 -2
- langflow/services/task/temp_flow_cleanup.py +7 -7
- langflow/services/telemetry/service.py +10 -10
- langflow/services/tracing/arize_phoenix.py +2 -2
- langflow/services/tracing/langfuse.py +1 -1
- langflow/services/tracing/langsmith.py +1 -1
- langflow/services/tracing/langwatch.py +1 -1
- langflow/services/tracing/opik.py +1 -1
- langflow/services/tracing/service.py +25 -6
- langflow/services/tracing/traceloop.py +245 -0
- langflow/services/utils.py +7 -7
- langflow/services/variable/kubernetes.py +3 -3
- langflow/services/variable/kubernetes_secrets.py +2 -1
- langflow/services/variable/service.py +5 -5
- langflow/utils/component_utils.py +9 -6
- langflow/utils/util.py +5 -5
- langflow/utils/validate.py +3 -3
- langflow/utils/voice_utils.py +2 -2
- {langflow_base_nightly-0.5.0.dev36.dist-info → langflow_base_nightly-0.5.0.dev38.dist-info}/METADATA +2 -1
- {langflow_base_nightly-0.5.0.dev36.dist-info → langflow_base_nightly-0.5.0.dev38.dist-info}/RECORD +342 -340
- langflow/frontend/assets/lazyIconImports-Bh1TFfvH.js +0 -2
- {langflow_base_nightly-0.5.0.dev36.dist-info → langflow_base_nightly-0.5.0.dev38.dist-info}/WHEEL +0 -0
- {langflow_base_nightly-0.5.0.dev36.dist-info → langflow_base_nightly-0.5.0.dev38.dist-info}/entry_points.txt +0 -0
langflow/components/data/file.py
CHANGED
@@ -1,26 +1,118 @@
+"""Enhanced file component v2 with mypy and ruff compliance."""
+
+from __future__ import annotations
+
 from copy import deepcopy
-from
+from enum import Enum
+from typing import TYPE_CHECKING, Any
 
 from langflow.base.data.base_file import BaseFileComponent
 from langflow.base.data.utils import TEXT_FILE_TYPES, parallel_load_data, parse_text_file_to_data
-from langflow.io import
+from langflow.io import (
+    BoolInput,
+    DropdownInput,
+    FileInput,
+    IntInput,
+    MessageTextInput,
+    Output,
+    StrInput,
+)
 from langflow.schema.data import Data
+from langflow.schema.message import Message
+
+if TYPE_CHECKING:
+    from langflow.schema import DataFrame
+
+
+class MockConversionStatus(Enum):
+    """Mock ConversionStatus for fallback compatibility."""
+
+    SUCCESS = "success"
+    FAILURE = "failure"
+
+
+class MockInputFormat(Enum):
+    """Mock InputFormat for fallback compatibility."""
+
+    PDF = "pdf"
+    IMAGE = "image"
+
+
+class MockImageRefMode(Enum):
+    """Mock ImageRefMode for fallback compatibility."""
+
+    PLACEHOLDER = "placeholder"
+    EMBEDDED = "embedded"
+
+
+class DoclingImports:
+    """Container for docling imports with type information."""
+
+    def __init__(
+        self,
+        conversion_status: type[Enum],
+        input_format: type[Enum],
+        document_converter: type,
+        image_ref_mode: type[Enum],
+        strategy: str,
+    ) -> None:
+        self.conversion_status = conversion_status
+        self.input_format = input_format
+        self.document_converter = document_converter
+        self.image_ref_mode = image_ref_mode
+        self.strategy = strategy
 
 
 class FileComponent(BaseFileComponent):
-    """
+    """Enhanced file component v2 that combines standard file loading with optional Docling processing and export.
 
-    This component supports
-
+    This component supports all features of the standard File component, plus an advanced mode
+    that enables Docling document processing and export to various formats (Markdown, HTML, etc.).
     """
 
     display_name = "File"
-    description = "Loads content from
+    description = "Loads content from files with optional advanced document processing and export using Docling."
     documentation: str = "https://docs.langflow.org/components-data#file"
     icon = "file-text"
     name = "File"
 
-
+    # Docling supported formats from original component
+    VALID_EXTENSIONS = [
+        "adoc",
+        "asciidoc",
+        "asc",
+        "bmp",
+        "csv",
+        "dotx",
+        "dotm",
+        "docm",
+        "docx",
+        "htm",
+        "html",
+        "jpeg",
+        "json",
+        "md",
+        "pdf",
+        "png",
+        "potx",
+        "ppsx",
+        "pptm",
+        "potm",
+        "ppsm",
+        "pptx",
+        "tiff",
+        "txt",
+        "xls",
+        "xlsx",
+        "xhtml",
+        "xml",
+        "webp",
+        *TEXT_FILE_TYPES,
+    ]
+
+    # Fixed export settings
+    EXPORT_FORMAT = "Markdown"
+    IMAGE_MODE = "placeholder"
 
     _base_inputs = deepcopy(BaseFileComponent._base_inputs)
 
@@ -31,6 +123,58 @@ class FileComponent(BaseFileComponent):
 
     inputs = [
         *_base_inputs,
+        BoolInput(
+            name="advanced_mode",
+            display_name="Advanced Parser",
+            value=False,
+            real_time_refresh=True,
+            info=(
+                "Enable advanced document processing and export with Docling for PDFs, images, and office documents. "
+                "Available only for single file processing."
+            ),
+            show=False,
+        ),
+        DropdownInput(
+            name="pipeline",
+            display_name="Pipeline",
+            info="Docling pipeline to use",
+            options=["standard", "vlm"],
+            value="standard",
+            advanced=True,
+        ),
+        DropdownInput(
+            name="ocr_engine",
+            display_name="OCR Engine",
+            info="OCR engine to use. Only available when pipeline is set to 'standard'.",
+            options=["", "easyocr"],
+            value="",
+            show=False,
+            advanced=True,
+        ),
+        StrInput(
+            name="md_image_placeholder",
+            display_name="Image placeholder",
+            info="Specify the image placeholder for markdown exports.",
+            value="<!-- image -->",
+            advanced=True,
+            show=False,
+        ),
+        StrInput(
+            name="md_page_break_placeholder",
+            display_name="Page break placeholder",
+            info="Add this placeholder between pages in the markdown output.",
+            value="",
+            advanced=True,
+            show=False,
+        ),
+        MessageTextInput(
+            name="doc_key",
+            display_name="Doc Key",
+            info="The key to use for the DoclingDocument column.",
+            value="doc",
+            advanced=True,
+            show=False,
+        ),
         BoolInput(
             name="use_multithreading",
             display_name="[Deprecated] Use Multithreading",
@@ -45,60 +189,284 @@ class FileComponent(BaseFileComponent):
             info="When multiple files are being processed, the number of files to process concurrently.",
             value=1,
         ),
+        BoolInput(
+            name="markdown",
+            display_name="Markdown Export",
+            info="Export processed documents to Markdown format. Only available when advanced mode is enabled.",
+            value=False,
+            show=False,
+        ),
     ]
 
     outputs = [
         Output(display_name="Raw Content", name="message", method="load_files_message"),
     ]
 
-    def
-
+    def _path_value(self, template) -> list[str]:
+        # Get current path value
+        return template.get("path", {}).get("file_path", [])
+
+    def update_build_config(
+        self,
+        build_config: dict[str, Any],
+        field_value: Any,
+        field_name: str | None = None,
+    ) -> dict[str, Any]:
+        """Update build configuration to show/hide fields based on file count and advanced_mode."""
         if field_name == "path":
-            #
-
-
-
-
-
-            if
-
-
-
-
-
-
-
-
-
-
-
-
+            # Get current path value
+            path_value = self._path_value(build_config)
+            file_path = path_value[0] if len(path_value) > 0 else ""
+
+            # Show/hide Advanced Parser based on file count (only for single files)
+            file_count = len(field_value) if field_value else 0
+            if file_count == 1 and not file_path.endswith((".csv", ".xlsx", ".parquet")):
+                build_config["advanced_mode"]["show"] = True
+            else:
+                build_config["advanced_mode"]["show"] = False
+                build_config["advanced_mode"]["value"] = False  # Reset to False when hidden
+
+                # Hide all advanced fields when Advanced Parser is not available
+                advanced_fields = [
+                    "pipeline",
+                    "ocr_engine",
+                    "doc_key",
+                    "md_image_placeholder",
+                    "md_page_break_placeholder",
+                ]
+                for field in advanced_fields:
+                    if field in build_config:
+                        build_config[field]["show"] = False
+
+        elif field_name == "advanced_mode":
+            # Show/hide advanced fields based on advanced_mode (only if single file)
+            advanced_fields = [
+                "pipeline",
+                "ocr_engine",
+                "doc_key",
+                "md_image_placeholder",
+                "md_page_break_placeholder",
+            ]
+
+            for field in advanced_fields:
+                if field in build_config:
+                    build_config[field]["show"] = field_value
+
+        return build_config
+
+    def update_outputs(self, frontend_node: dict[str, Any], field_name: str, field_value: Any) -> dict[str, Any]:  # noqa: ARG002
+        """Dynamically show outputs based on the number of files and their types."""
+        if field_name not in ["path", "advanced_mode"]:
+            return frontend_node
+
+        # Add outputs based on the number of files in the path
+        template = frontend_node.get("template", {})
+        path_value = self._path_value(template)
+        if len(path_value) == 0:
+            return frontend_node
+
+        # Clear existing outputs
+        frontend_node["outputs"] = []
+
+        if len(path_value) == 1:
+            # We need to check if the file is structured content
+            file_path = path_value[0] if field_name == "path" else frontend_node["template"]["path"]["file_path"][0]
+            if file_path.endswith((".csv", ".xlsx", ".parquet")):
                 frontend_node["outputs"].append(
-                    Output(display_name="
+                    Output(display_name="Structured Content", name="dataframe", method="load_files_structured"),
+                )
+            elif file_path.endswith(".json"):
+                frontend_node["outputs"].append(
+                    Output(display_name="Structured Content", name="json", method="load_files_json"),
+                )
+
+            # Add outputs based on advanced mode
+            advanced_mode = frontend_node.get("template", {}).get("advanced_mode", {}).get("value", False)
+
+            if advanced_mode:
+                # Advanced mode: Structured Output, Markdown, and File Path
+                frontend_node["outputs"].append(
+                    Output(display_name="Structured Output", name="advanced", method="load_files_advanced"),
+                )
+                frontend_node["outputs"].append(
+                    Output(display_name="Markdown", name="markdown", method="load_files_markdown"),
                 )
                 frontend_node["outputs"].append(
                     Output(display_name="File Path", name="path", method="load_files_path"),
                 )
             else:
-                #
+                # Normal mode: Raw Content and File Path
                 frontend_node["outputs"].append(
-                    Output(display_name="
+                    Output(display_name="Raw Content", name="message", method="load_files_message"),
                 )
+                frontend_node["outputs"].append(
+                    Output(display_name="File Path", name="path", method="load_files_path"),
+                )
+        else:
+            # For multiple files, we show the files output (DataFrame format)
+            # Advanced Parser is not available for multiple files
+            frontend_node["outputs"].append(
+                Output(display_name="Files", name="dataframe", method="load_files"),
+            )
 
         return frontend_node
 
-    def
-    """
+    def _try_import_docling(self) -> DoclingImports | None:
+        """Try different import strategies for docling components."""
+        # Try strategy 1: Latest docling structure
+        try:
+            from docling.datamodel.base_models import ConversionStatus, InputFormat  # type: ignore[import-untyped]
+            from docling.document_converter import DocumentConverter  # type: ignore[import-untyped]
+            from docling_core.types.doc import ImageRefMode  # type: ignore[import-untyped]
+
+            self.log("Using latest docling import structure")
+            return DoclingImports(
+                conversion_status=ConversionStatus,
+                input_format=InputFormat,
+                document_converter=DocumentConverter,
+                image_ref_mode=ImageRefMode,
+                strategy="latest",
+            )
+        except ImportError as e:
+            self.log(f"Latest docling structure failed: {e}")
+
+        # Try strategy 2: Alternative import paths
+        try:
+            from docling.document_converter import DocumentConverter  # type: ignore[import-untyped]
+            from docling_core.types.doc import ImageRefMode  # type: ignore[import-untyped]
+
+            # Try to get ConversionStatus from different locations
+            conversion_status: type[Enum] = MockConversionStatus
+            input_format: type[Enum] = MockInputFormat
+
+            try:
+                from docling_core.types import ConversionStatus, InputFormat  # type: ignore[import-untyped]
+
+                conversion_status = ConversionStatus
+                input_format = InputFormat
+            except ImportError:
+                try:
+                    from docling.datamodel import ConversionStatus, InputFormat  # type: ignore[import-untyped]
+
+                    conversion_status = ConversionStatus
+                    input_format = InputFormat
+                except ImportError:
+                    # Use mock enums if we can't find them
+                    pass
+
+            self.log("Using alternative docling import structure")
+            return DoclingImports(
+                conversion_status=conversion_status,
+                input_format=input_format,
+                document_converter=DocumentConverter,
+                image_ref_mode=ImageRefMode,
+                strategy="alternative",
+            )
+        except ImportError as e:
+            self.log(f"Alternative docling structure failed: {e}")
+
+        # Try strategy 3: Basic converter only
+        try:
+            from docling.document_converter import DocumentConverter  # type: ignore[import-untyped]
+
+            self.log("Using basic docling import structure with mocks")
+            return DoclingImports(
+                conversion_status=MockConversionStatus,
+                input_format=MockInputFormat,
+                document_converter=DocumentConverter,
+                image_ref_mode=MockImageRefMode,
+                strategy="basic",
+            )
+        except ImportError as e:
+            self.log(f"Basic docling structure failed: {e}")
+
+        # Strategy 4: Complete fallback - return None to indicate failure
+        return None
+
+    def _create_advanced_converter(self, docling_imports: DoclingImports) -> Any:
+        """Create advanced converter with pipeline options if available."""
+        try:
+            from docling.datamodel.pipeline_options import PdfPipelineOptions  # type: ignore[import-untyped]
+            from docling.document_converter import PdfFormatOption  # type: ignore[import-untyped]
+
+            document_converter = docling_imports.document_converter
+            input_format = docling_imports.input_format
+
+            # Create basic pipeline options
+            pipeline_options = PdfPipelineOptions()
+
+            # Configure OCR if specified and available
+            if self.ocr_engine:
+                try:
+                    from docling.models.factories import get_ocr_factory  # type: ignore[import-untyped]
+
+                    pipeline_options.do_ocr = True
+                    ocr_factory = get_ocr_factory(allow_external_plugins=False)
+                    ocr_options = ocr_factory.create_options(kind=self.ocr_engine)
+                    pipeline_options.ocr_options = ocr_options
+                    self.log(f"Configured OCR with engine: {self.ocr_engine}")
+                except Exception as e:  # noqa: BLE001
+                    self.log(f"Could not configure OCR: {e}, proceeding without OCR")
+                    pipeline_options.do_ocr = False
+
+            # Create format options
+            pdf_format_option = PdfFormatOption(pipeline_options=pipeline_options)
+            format_options = {}
+            if hasattr(input_format, "PDF"):
+                format_options[input_format.PDF] = pdf_format_option
+            if hasattr(input_format, "IMAGE"):
+                format_options[input_format.IMAGE] = pdf_format_option
 
-
-            file_list (list[BaseFileComponent.BaseFile]): List of files to process.
+            return document_converter(format_options=format_options)
 
-
-
-
+        except Exception as e:  # noqa: BLE001
+            self.log(f"Could not create advanced converter: {e}, using basic converter")
+            return docling_imports.document_converter()
 
-
-
+    def _is_docling_compatible(self, file_path: str) -> bool:
+        """Check if file is compatible with Docling processing."""
+        # All VALID_EXTENSIONS are Docling compatible (except for TEXT_FILE_TYPES which may overlap)
+        docling_extensions = [
+            ".adoc",
+            ".asciidoc",
+            ".asc",
+            ".bmp",
+            ".csv",
+            ".dotx",
+            ".dotm",
+            ".docm",
+            ".docx",
+            ".htm",
+            ".html",
+            ".jpeg",
+            ".json",
+            ".md",
+            ".pdf",
+            ".png",
+            ".potx",
+            ".ppsx",
+            ".pptm",
+            ".potm",
+            ".ppsm",
+            ".pptx",
+            ".tiff",
+            ".txt",
+            ".xls",
+            ".xlsx",
+            ".xhtml",
+            ".xml",
+            ".webp",
+        ]
+        return any(file_path.lower().endswith(ext) for ext in docling_extensions)
+
+    def process_files(
+        self,
+        file_list: list[BaseFileComponent.BaseFile],
+    ) -> list[BaseFileComponent.BaseFile]:
+        """Process files using standard parsing or Docling based on advanced_mode and file type."""
+
+        def process_file_standard(file_path: str, *, silent_errors: bool = False) -> Data | None:
+            """Process a single file using standard text parsing."""
             try:
                 return parse_text_file_to_data(file_path, silent_errors=silent_errors)
             except FileNotFoundError as e:
@@ -114,27 +482,179 @@ class FileComponent(BaseFileComponent):
                    raise
            return None
 
+        def process_file_docling(file_path: str, *, silent_errors: bool = False) -> Data | None:
+            """Process a single file using Docling if compatible, otherwise standard processing."""
+            # Try Docling first if file is compatible and advanced mode is enabled
+            try:
+                return self._process_with_docling_and_export(file_path)
+            except Exception as e:  # noqa: BLE001
+                self.log(f"Docling processing failed for {file_path}: {e}, falling back to standard processing")
+                if not silent_errors:
+                    # Return error data instead of raising
+                    return Data(data={"error": f"Docling processing failed: {e}", "file_path": file_path})
+
+            return None
+
         if not file_list:
             msg = "No files to process."
             raise ValueError(msg)
 
+        file_path = str(file_list[0].path)
+        if self.advanced_mode and self._is_docling_compatible(file_path):
+            processed_data = process_file_docling(file_path)
+            if not processed_data:
+                msg = f"Failed to process file with Docling: {file_path}"
+                raise ValueError(msg)
+
+            # Serialize processed data to match Data structure
+            serialized_data = processed_data.serialize_model()
+
+            # Now, if doc is nested, we need to unravel it
+            clean_data: list[Data | None] = [processed_data]
+
+            # This is where we've manually processed the data
+            try:
+                if "exported_content" not in serialized_data:
+                    clean_data = [
+                        Data(
+                            data={
+                                "file_path": file_path,
+                                **(
+                                    item["element"]
+                                    if "element" in item
+                                    else {k: v for k, v in item.items() if k != "file_path"}
+                                ),
+                            }
+                        )
+                        for item in serialized_data["doc"]
+                    ]
+            except Exception as _:  # noqa: BLE001
+                raise ValueError(serialized_data) from None
+
+            # Repeat file_list to match the number of processed data elements
+            final_data: list[Data | None] = clean_data
+            return self.rollup_data(file_list, final_data)
+
         concurrency = 1 if not self.use_multithreading else max(1, self.concurrency_multithreading)
         file_count = len(file_list)
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+        self.log(f"Starting parallel processing of {file_count} files with concurrency: {concurrency}.")
+        file_paths = [str(file.path) for file in file_list]
+        my_data = parallel_load_data(
+            file_paths,
+            silent_errors=self.silent_errors,
+            load_function=process_file_standard,
+            max_concurrency=concurrency,
+        )
+
+        return self.rollup_data(file_list, my_data)
+
+    def load_files_advanced(self) -> DataFrame:
+        """Load files using advanced Docling processing and export to an advanced format."""
+        # TODO: Update
+        self.markdown = False
+        return self.load_files()
+
+    def load_files_markdown(self) -> Message:
+        """Load files using advanced Docling processing and export to Markdown format."""
+        self.markdown = True
+        result = self.load_files()
+        return Message(text=str(result.text[0]))
+
+    def _process_with_docling_and_export(self, file_path: str) -> Data:
+        """Process a single file with Docling and export to the specified format."""
+        # Import docling components only when needed
+        docling_imports = self._try_import_docling()
+
+        if docling_imports is None:
+            msg = "Docling not available for advanced processing"
+            raise ImportError(msg)
+
+        conversion_status = docling_imports.conversion_status
+        document_converter = docling_imports.document_converter
+        image_ref_mode = docling_imports.image_ref_mode
+
+        try:
+            # Create converter based on strategy and pipeline setting
+            if docling_imports.strategy == "latest" and self.pipeline == "standard":
+                converter = self._create_advanced_converter(docling_imports)
+            else:
+                # Use basic converter for compatibility
+                converter = document_converter()
+                self.log("Using basic DocumentConverter for Docling processing")
+
+            # Process single file
+            result = converter.convert(file_path)
+
+            # Check if conversion was successful
+            success = False
+            if hasattr(result, "status"):
+                if hasattr(conversion_status, "SUCCESS"):
+                    success = result.status == conversion_status.SUCCESS
+                else:
+                    success = str(result.status).lower() == "success"
+            elif hasattr(result, "document"):
+                # If no status but has document, assume success
+                success = result.document is not None
+
+            if not success:
+                return Data(data={"error": "Docling conversion failed", "file_path": file_path})
+
+            if self.markdown:
+                self.log("Exporting document to Markdown format")
+                # Export the document to the specified format
+                exported_content = self._export_document(result.document, image_ref_mode)
+
+                return Data(
+                    text=exported_content,
+                    data={
+                        "exported_content": exported_content,
+                        "export_format": self.EXPORT_FORMAT,
+                        "file_path": file_path,
+                    },
+                )
+
+            return Data(
+                data={
+                    "doc": self.docling_to_dataframe_simple(result.document.export_to_dict()),
+                    "export_format": self.EXPORT_FORMAT,
+                    "file_path": file_path,
+                }
             )
 
-
-
+        except Exception as e:  # noqa: BLE001
+            return Data(data={"error": f"Docling processing error: {e!s}", "file_path": file_path})
+
+    def docling_to_dataframe_simple(self, doc):
+        """Extract all text elements into a simple DataFrame."""
+        return [
+            {
+                "page_no": text["prov"][0]["page_no"] if text["prov"] else None,
+                "label": text["label"],
+                "text": text["text"],
+                "level": text.get("level", None),  # for headers
+            }
+            for text in doc["texts"]
+        ]
+
+    def _export_document(self, document: Any, image_ref_mode: type[Enum]) -> str:
+        """Export document to Markdown format with placeholder images."""
+        try:
+            image_mode = (
+                image_ref_mode(self.IMAGE_MODE) if hasattr(image_ref_mode, self.IMAGE_MODE) else self.IMAGE_MODE
+            )
+
+            # Always export to Markdown since it's fixed
+            return document.export_to_markdown(
+                image_mode=image_mode,
+                image_placeholder=self.md_image_placeholder,
+                page_break_placeholder=self.md_page_break_placeholder,
+            )
+
+        except Exception as e:  # noqa: BLE001
+            self.log(f"Markdown export failed: {e}, using basic text export")
+            # Fallback to basic text export
+            try:
+                return document.export_to_text()
+            except Exception:  # noqa: BLE001
+                return str(document)