langflow-base-nightly 0.5.0.dev37__py3-none-any.whl → 0.5.0.dev39__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (397)
  1. langflow/__main__.py +1 -1
  2. langflow/alembic/versions/0882f9657f22_encrypt_existing_mcp_auth_settings_.py +122 -0
  3. langflow/alembic/versions/4e5980a44eaa_fix_date_times_again.py +24 -30
  4. langflow/alembic/versions/58b28437a398_modify_nullable.py +6 -6
  5. langflow/alembic/versions/79e675cb6752_change_datetime_type.py +24 -30
  6. langflow/alembic/versions/b2fa308044b5_add_unique_constraints.py +12 -13
  7. langflow/api/build.py +21 -26
  8. langflow/api/health_check_router.py +3 -3
  9. langflow/api/utils.py +3 -3
  10. langflow/api/v1/callback.py +2 -2
  11. langflow/api/v1/chat.py +19 -31
  12. langflow/api/v1/endpoints.py +10 -10
  13. langflow/api/v1/flows.py +1 -1
  14. langflow/api/v1/knowledge_bases.py +3 -3
  15. langflow/api/v1/mcp.py +12 -12
  16. langflow/api/v1/mcp_projects.py +405 -120
  17. langflow/api/v1/mcp_utils.py +8 -8
  18. langflow/api/v1/schemas.py +2 -7
  19. langflow/api/v1/store.py +1 -1
  20. langflow/api/v1/validate.py +2 -2
  21. langflow/api/v1/voice_mode.py +58 -62
  22. langflow/api/v2/files.py +2 -2
  23. langflow/api/v2/mcp.py +10 -9
  24. langflow/base/composio/composio_base.py +21 -2
  25. langflow/base/data/docling_utils.py +194 -0
  26. langflow/base/embeddings/aiml_embeddings.py +1 -1
  27. langflow/base/flow_processing/utils.py +1 -2
  28. langflow/base/io/__init__.py +0 -1
  29. langflow/base/langwatch/utils.py +2 -1
  30. langflow/base/mcp/util.py +49 -47
  31. langflow/base/prompts/api_utils.py +1 -1
  32. langflow/base/tools/flow_tool.py +2 -2
  33. langflow/base/tools/run_flow.py +2 -6
  34. langflow/components/FAISS/__init__.py +34 -0
  35. langflow/components/Notion/add_content_to_page.py +2 -2
  36. langflow/components/Notion/list_database_properties.py +2 -2
  37. langflow/components/Notion/list_pages.py +2 -2
  38. langflow/components/Notion/page_content_viewer.py +2 -2
  39. langflow/components/Notion/update_page_property.py +1 -1
  40. langflow/components/agentql/agentql_api.py +2 -10
  41. langflow/components/agents/agent.py +249 -55
  42. langflow/components/agents/mcp_component.py +14 -14
  43. langflow/components/anthropic/anthropic.py +5 -4
  44. langflow/components/assemblyai/assemblyai_get_subtitles.py +2 -2
  45. langflow/components/assemblyai/assemblyai_lemur.py +2 -2
  46. langflow/components/assemblyai/assemblyai_list_transcripts.py +2 -2
  47. langflow/components/assemblyai/assemblyai_poll_transcript.py +2 -2
  48. langflow/components/assemblyai/assemblyai_start_transcript.py +2 -2
  49. langflow/components/cassandra/__init__.py +40 -0
  50. langflow/components/chroma/__init__.py +34 -0
  51. langflow/components/clickhouse/__init__.py +34 -0
  52. langflow/components/couchbase/__init__.py +34 -0
  53. langflow/components/data/file.py +575 -55
  54. langflow/components/data/url.py +1 -1
  55. langflow/components/datastax/__init__.py +3 -3
  56. langflow/components/datastax/astra_assistant_manager.py +3 -3
  57. langflow/components/datastax/create_assistant.py +1 -2
  58. langflow/components/deactivated/merge_data.py +1 -2
  59. langflow/components/deactivated/sub_flow.py +6 -7
  60. langflow/components/deactivated/vectara_self_query.py +3 -3
  61. langflow/components/docling/__init__.py +0 -198
  62. langflow/components/docling/docling_inline.py +1 -1
  63. langflow/components/elastic/__init__.py +37 -0
  64. langflow/components/embeddings/text_embedder.py +3 -3
  65. langflow/components/firecrawl/firecrawl_extract_api.py +2 -9
  66. langflow/components/google/gmail.py +1 -1
  67. langflow/components/google/google_generative_ai.py +5 -11
  68. langflow/components/groq/groq.py +4 -3
  69. langflow/components/helpers/current_date.py +2 -3
  70. langflow/components/helpers/memory.py +1 -1
  71. langflow/components/ibm/watsonx.py +1 -1
  72. langflow/components/ibm/watsonx_embeddings.py +1 -1
  73. langflow/components/langwatch/langwatch.py +3 -3
  74. langflow/components/logic/flow_tool.py +2 -2
  75. langflow/components/logic/notify.py +1 -1
  76. langflow/components/logic/run_flow.py +2 -3
  77. langflow/components/logic/sub_flow.py +4 -5
  78. langflow/components/mem0/mem0_chat_memory.py +2 -8
  79. langflow/components/milvus/__init__.py +34 -0
  80. langflow/components/mongodb/__init__.py +34 -0
  81. langflow/components/nvidia/nvidia.py +3 -3
  82. langflow/components/olivya/olivya.py +7 -7
  83. langflow/components/ollama/ollama.py +9 -6
  84. langflow/components/perplexity/perplexity.py +3 -13
  85. langflow/components/pgvector/__init__.py +34 -0
  86. langflow/components/pinecone/__init__.py +34 -0
  87. langflow/components/processing/batch_run.py +8 -8
  88. langflow/components/processing/data_operations.py +2 -2
  89. langflow/components/processing/merge_data.py +1 -2
  90. langflow/components/processing/message_to_data.py +2 -3
  91. langflow/components/processing/parse_json_data.py +1 -1
  92. langflow/components/prototypes/python_function.py +2 -3
  93. langflow/components/qdrant/__init__.py +34 -0
  94. langflow/components/redis/__init__.py +36 -2
  95. langflow/components/redis/redis.py +75 -29
  96. langflow/components/redis/redis_chat.py +43 -0
  97. langflow/components/serpapi/serp.py +1 -1
  98. langflow/components/supabase/__init__.py +37 -0
  99. langflow/components/tavily/tavily_extract.py +1 -1
  100. langflow/components/tavily/tavily_search.py +1 -1
  101. langflow/components/tools/calculator.py +2 -2
  102. langflow/components/tools/python_code_structured_tool.py +3 -10
  103. langflow/components/tools/python_repl.py +2 -2
  104. langflow/components/tools/searxng.py +3 -3
  105. langflow/components/tools/serp_api.py +2 -2
  106. langflow/components/tools/tavily_search_tool.py +2 -2
  107. langflow/components/tools/yahoo_finance.py +1 -1
  108. langflow/components/twelvelabs/video_embeddings.py +4 -4
  109. langflow/components/upstash/__init__.py +34 -0
  110. langflow/components/vectara/__init__.py +37 -0
  111. langflow/components/vectorstores/__init__.py +0 -69
  112. langflow/components/vectorstores/local_db.py +2 -1
  113. langflow/components/weaviate/__init__.py +34 -0
  114. langflow/components/yahoosearch/yahoo.py +1 -1
  115. langflow/components/youtube/trending.py +3 -4
  116. langflow/custom/attributes.py +2 -1
  117. langflow/custom/code_parser/code_parser.py +1 -1
  118. langflow/custom/custom_component/base_component.py +1 -1
  119. langflow/custom/custom_component/component.py +16 -2
  120. langflow/custom/dependency_analyzer.py +165 -0
  121. langflow/custom/directory_reader/directory_reader.py +7 -7
  122. langflow/custom/directory_reader/utils.py +1 -2
  123. langflow/custom/utils.py +63 -45
  124. langflow/events/event_manager.py +1 -1
  125. langflow/frontend/assets/{SlackIcon-CnvyOamQ.js → SlackIcon-Cr3Q15Px.js} +1 -1
  126. langflow/frontend/assets/{Wikipedia-nyTEXdr2.js → Wikipedia-GxM5sPdM.js} +1 -1
  127. langflow/frontend/assets/{Wolfram-BYMQkNSq.js → Wolfram-BN3-VOCA.js} +1 -1
  128. langflow/frontend/assets/{index-DZTC5pdT.js → index-28oOcafk.js} +1 -1
  129. langflow/frontend/assets/{index-ChXJpBz4.js → index-2wSXqBtB.js} +1 -1
  130. langflow/frontend/assets/{index-BB15_iOb.js → index-3wW7BClE.js} +1 -1
  131. langflow/frontend/assets/{index-DKHNourL.js → index-6pyH3ZJB.js} +1 -1
  132. langflow/frontend/assets/{index-BvwZfF2i.js → index-AWCSdofD.js} +1 -1
  133. langflow/frontend/assets/{index-Bvxg4_ux.js → index-B2Zgv_xv.js} +1 -1
  134. langflow/frontend/assets/{index-Bd6WtbKA.js → index-B2ptVQGM.js} +1 -1
  135. langflow/frontend/assets/{index-C7QWbnLK.js → index-B3TANVes.js} +1 -1
  136. langflow/frontend/assets/{index-CpvYQ0ug.js → index-B4yCvZKV.js} +1 -1
  137. langflow/frontend/assets/{index-Dg-63Si_.js → index-BC65VuWx.js} +1 -1
  138. langflow/frontend/assets/{index-C6jri9Wm.js → index-BCDSei1q.js} +1 -1
  139. langflow/frontend/assets/{index-OazXJdEl.js → index-BJy50PvP.js} +1 -1
  140. langflow/frontend/assets/{index-CWdkbVsd.js → index-BKseQQ2I.js} +1 -1
  141. langflow/frontend/assets/{index-CaQ_H9ww.js → index-BLTxEeTi.js} +1 -1
  142. langflow/frontend/assets/{index-DGRMNe9n.js → index-BRg1f4Mu.js} +1 -1
  143. langflow/frontend/assets/{index-D8lOi1GI.js → index-BS8Vo8nc.js} +1 -1
  144. langflow/frontend/assets/{index-B748uLP1.js → index-BTKOU4xC.js} +1 -1
  145. langflow/frontend/assets/{index-Dqd4RjYA.js → index-BVwJDmw-.js} +1 -1
  146. langflow/frontend/assets/{index-DbMFlnHE.js → index-BWYuQ2Sj.js} +1 -1
  147. langflow/frontend/assets/{index-BEMw2Np8.js → index-BWdLILDG.js} +1 -1
  148. langflow/frontend/assets/{index-BmX5CoED.js → index-BZcw4827.js} +1 -1
  149. langflow/frontend/assets/{index-CyPvTB63.js → index-Bbi87Ve4.js} +1 -1
  150. langflow/frontend/assets/{index-BTEW9e8P.js → index-Bf0IYKLd.js} +1 -1
  151. langflow/frontend/assets/{index-BZgXW854.js → index-Bg5nrMRh.js} +1 -1
  152. langflow/frontend/assets/{index-BBxAPk1y.js → index-BiC280Nx.js} +1 -1
  153. langflow/frontend/assets/{index-BR0bkVqX.js → index-BiKKN6FR.js} +1 -1
  154. langflow/frontend/assets/{index-CTrt1Q_j.js → index-Bief6eyJ.js} +1 -1
  155. langflow/frontend/assets/{index-D5_DsUJc.js → index-BkXec1Yf.js} +1 -1
  156. langflow/frontend/assets/{index-CZQ9rXNa.js → index-Bnl6QHtP.js} +1 -1
  157. langflow/frontend/assets/{index-BChjg6Az.js → index-BpxbUiZD.js} +1979 -1979
  158. langflow/frontend/assets/{index-BOeo01QB.js → index-BrJV8psX.js} +1 -1
  159. langflow/frontend/assets/{index-DysKpOuj.js → index-BwLWcUXL.js} +1 -1
  160. langflow/frontend/assets/{index-Bnqod3vk.js → index-Bx7dBY26.js} +1 -1
  161. langflow/frontend/assets/{index-D3DDfngy.js → index-C-EdnFdA.js} +1 -1
  162. langflow/frontend/assets/{index-Bsa0xZyL.js → index-C-Xfg4cD.js} +1 -1
  163. langflow/frontend/assets/{index-BTrsh9LS.js → index-C1f2wMat.js} +1 -1
  164. langflow/frontend/assets/index-C1xroOlH.css +1 -0
  165. langflow/frontend/assets/{index-B1YN7oMV.js → index-C3KequvP.js} +1 -1
  166. langflow/frontend/assets/{index-DzW2mfkK.js → index-C3ZjKdCD.js} +1 -1
  167. langflow/frontend/assets/{index-ajRge-Mg.js → index-C3l0zYn0.js} +1 -1
  168. langflow/frontend/assets/{index-cvZdgWHQ.js → index-C3yvArUT.js} +1 -1
  169. langflow/frontend/assets/{index-C-2hghRJ.js → index-C9Cxnkl8.js} +1 -1
  170. langflow/frontend/assets/{index-BhIOhlCH.js → index-CBc8fEAE.js} +1 -1
  171. langflow/frontend/assets/{index-B3Sur4Z3.js → index-CBvrGgID.js} +1 -1
  172. langflow/frontend/assets/{index-CCePCqkT.js → index-CD-PqGCY.js} +1 -1
  173. langflow/frontend/assets/{index-8yMsjVV2.js → index-CGO1CiUr.js} +1 -1
  174. langflow/frontend/assets/{index-DF5VwgU6.js → index-CH5UVA9b.js} +1 -1
  175. langflow/frontend/assets/{index-dcnYpT9N.js → index-CLJeJYjH.js} +1 -1
  176. langflow/frontend/assets/{index-DfxYyS3M.js → index-CMZ79X-Y.js} +1 -1
  177. langflow/frontend/assets/{index-ya2uXE8v.js → index-CMzfJKiW.js} +1 -1
  178. langflow/frontend/assets/{index-DkelbYy7.js → index-CNw1H-Wc.js} +1 -1
  179. langflow/frontend/assets/{index-DytJENYD.js → index-CPHEscq9.js} +1 -1
  180. langflow/frontend/assets/{index-Bv8h2Z-q.js → index-CRPKJZw9.js} +1 -1
  181. langflow/frontend/assets/{index-D-9TI74R.js → index-CRPyCfYy.js} +1 -1
  182. langflow/frontend/assets/{index-BLGYN-9b.js → index-CRcMqCIj.js} +1 -1
  183. langflow/frontend/assets/{index-tVYiABdp.js → index-CUVDws8F.js} +1 -1
  184. langflow/frontend/assets/{index-CpcbQZIF.js → index-CVWQfRYZ.js} +1 -1
  185. langflow/frontend/assets/{index-DPCzHdsC.js → index-CVl6MbaM.js} +1 -1
  186. langflow/frontend/assets/{index-DkXy1WFo.js → index-CVwWoX99.js} +1 -1
  187. langflow/frontend/assets/{index-DK1Ptcc4.js → index-CWPzZtSx.js} +1 -1
  188. langflow/frontend/assets/{index-DHq8TQPB.js → index-CZqRL9DE.js} +1 -1
  189. langflow/frontend/assets/{index-DnEGCgih.js → index-CdIf07Rw.js} +1 -1
  190. langflow/frontend/assets/{index-BIQQCMvz.js → index-Cewy7JZE.js} +1 -1
  191. langflow/frontend/assets/{index-D8GJngXa.js → index-CfwLpbMM.js} +1 -1
  192. langflow/frontend/assets/{index-C_TdzfAn.js → index-CiR1dxI4.js} +1 -1
  193. langflow/frontend/assets/{index-BzL_EoKd.js → index-CiixOzDG.js} +1 -1
  194. langflow/frontend/assets/{index-Boso-xEw.js → index-ClsuDmR6.js} +1 -1
  195. langflow/frontend/assets/{index-8WdfSTTz.js → index-CmEYYRN1.js} +1 -1
  196. langflow/frontend/assets/{index-FUxmznS-.js → index-Co20d-eQ.js} +1 -1
  197. langflow/frontend/assets/{index-C82JjCPD.js → index-CpzXS6md.js} +1 -1
  198. langflow/frontend/assets/{index-DIDDfmlJ.js → index-Cqpzl1J4.js} +1 -1
  199. langflow/frontend/assets/{index-_UcqeEjm.js → index-CtVIONP2.js} +1 -1
  200. langflow/frontend/assets/{index-Gkrq-vzm.js → index-CuFXdTx4.js} +1 -1
  201. langflow/frontend/assets/{index-WPFivmdQ.js → index-Cyd2HtHK.js} +1 -1
  202. langflow/frontend/assets/{index-BFp_O-c9.js → index-D-1tA8Dt.js} +1 -1
  203. langflow/frontend/assets/{index-BqPpO6KG.js → index-D-KY3kkq.js} +1 -1
  204. langflow/frontend/assets/{index-Db71w3lq.js → index-D-_B1a8v.js} +1 -1
  205. langflow/frontend/assets/{index-BIzTEqFh.js → index-D14EWPyZ.js} +1 -1
  206. langflow/frontend/assets/{index-BbJjt5m4.js → index-D2N3l-cw.js} +1 -1
  207. langflow/frontend/assets/{index-DCRk27Tp.js → index-D5ETnvJa.js} +1 -1
  208. langflow/frontend/assets/{index-CvcEzq4x.js → index-D7kquVv2.js} +1 -1
  209. langflow/frontend/assets/{index-Q9vDw0Xl.js → index-DA6-bvgN.js} +1 -1
  210. langflow/frontend/assets/{index-l7bzB8Ex.js → index-DDWBeudF.js} +1 -1
  211. langflow/frontend/assets/{index-BCCGvqay.js → index-DDcMAaG4.js} +1 -1
  212. langflow/frontend/assets/{index-pCQ_yw8m.js → index-DHgomBdh.js} +1 -1
  213. langflow/frontend/assets/{index-BxEuHa76.js → index-DJP-ss47.js} +1 -1
  214. langflow/frontend/assets/{index-BbRm7beF.js → index-DQ7VYqQc.js} +1 -1
  215. langflow/frontend/assets/{index-Car-zdor.js → index-DTqbvGC0.js} +1 -1
  216. langflow/frontend/assets/{index-BRxvproo.js → index-DUpri6zF.js} +1 -1
  217. langflow/frontend/assets/{index-BQ6NUdMY.js → index-DV3utZDZ.js} +1 -1
  218. langflow/frontend/assets/{index-DjQETUy8.js → index-DXRfN4HV.js} +1 -1
  219. langflow/frontend/assets/{index-DfngcQxO.js → index-Db9dYSzy.js} +1 -1
  220. langflow/frontend/assets/{index-rXV1G1aB.js → index-DdtMEn6I.js} +1 -1
  221. langflow/frontend/assets/{index-DmMDPoi0.js → index-DfDhMHgQ.js} +1 -1
  222. langflow/frontend/assets/{index-DJB12jIC.js → index-Dfe7qfvf.js} +1 -1
  223. langflow/frontend/assets/{index-C_veJlEb.js → index-DhtZ5hx8.js} +1 -1
  224. langflow/frontend/assets/{index-CQMoqLAu.js → index-DiB3CTo8.js} +1 -1
  225. langflow/frontend/assets/{index-DVlceYFD.js → index-DiGWASY5.js} +1 -1
  226. langflow/frontend/assets/{index-Du_18NCU.js → index-Dl5amdBz.js} +1 -1
  227. langflow/frontend/assets/{index-CYDAYm-i.js → index-DlD4dXlZ.js} +1 -1
  228. langflow/frontend/assets/{index-CLPdN-q6.js → index-DmeiHnfl.js} +1 -1
  229. langflow/frontend/assets/index-Dmu-X5-4.js +1 -0
  230. langflow/frontend/assets/{index-BzEUlaw_.js → index-DpVWih90.js} +1 -1
  231. langflow/frontend/assets/{index-D6PSjHxP.js → index-DrDrcajG.js} +1 -1
  232. langflow/frontend/assets/{index-Dq5ilsem.js → index-Du-pc0KE.js} +1 -1
  233. langflow/frontend/assets/{index-CYe8Ipef.js → index-DwPkMTaY.js} +1 -1
  234. langflow/frontend/assets/{index-BVEZDXxS.js → index-DwQEZe3C.js} +1 -1
  235. langflow/frontend/assets/{index-BvT7L317.js → index-DyJFTK24.js} +1 -1
  236. langflow/frontend/assets/{index-HK3bVMYA.js → index-J38wh62w.js} +1 -1
  237. langflow/frontend/assets/{index-CCxGSSTT.js → index-Kwdl-e29.js} +1 -1
  238. langflow/frontend/assets/{index-BOB_zsjl.js → index-OwPvCmpW.js} +1 -1
  239. langflow/frontend/assets/{index-Dsps-jKu.js → index-Tw3Os-DN.js} +1 -1
  240. langflow/frontend/assets/{index-CFDvOtKC.js → index-X0guhYF8.js} +1 -1
  241. langflow/frontend/assets/{index-BX5D-USa.js → index-dJWNxIRH.js} +1 -1
  242. langflow/frontend/assets/{index-BRYjyhAd.js → index-dcJ8-agu.js} +1 -1
  243. langflow/frontend/assets/{index-Ui4xUImO.js → index-eo2mAtL-.js} +1 -1
  244. langflow/frontend/assets/{index-CxvP91st.js → index-hG24k5xJ.js} +1 -1
  245. langflow/frontend/assets/{index-CVQmT7ZL.js → index-h_aSZHf3.js} +1 -1
  246. langflow/frontend/assets/{index-BIXaW2aY.js → index-hbndqB9B.js} +1 -1
  247. langflow/frontend/assets/{index-DIkNW9Cd.js → index-iJngutFo.js} +1 -1
  248. langflow/frontend/assets/{index-BWmPX4iQ.js → index-lTpteg8t.js} +1 -1
  249. langflow/frontend/assets/{index-xuIrH2Dq.js → index-lZX9AvZW.js} +1 -1
  250. langflow/frontend/assets/{index-yCHsaqs8.js → index-m8QA6VNM.js} +1 -1
  251. langflow/frontend/assets/{index-BkPYpfgw.js → index-o0D2S7xW.js} +1 -1
  252. langflow/frontend/assets/{index-DpClkXIV.js → index-ovFJ_0J6.js} +1 -1
  253. langflow/frontend/assets/{index-CmplyEaa.js → index-pYJJOcma.js} +1 -1
  254. langflow/frontend/assets/{index-CJo_cyWW.js → index-sI75DsdM.js} +1 -1
  255. langflow/frontend/assets/{index-nVwHLjuV.js → index-xvFOmxx4.js} +1 -1
  256. langflow/frontend/assets/{index-LbYjHKkn.js → index-z3SRY-mX.js} +1 -1
  257. langflow/frontend/assets/lazyIconImports-D97HEZkE.js +2 -0
  258. langflow/frontend/assets/{use-post-add-user-BrBYH9eR.js → use-post-add-user-C0MdTpQ5.js} +1 -1
  259. langflow/frontend/index.html +2 -2
  260. langflow/graph/edge/base.py +2 -3
  261. langflow/graph/graph/base.py +15 -13
  262. langflow/graph/graph/constants.py +3 -0
  263. langflow/graph/utils.py +6 -6
  264. langflow/graph/vertex/base.py +4 -5
  265. langflow/graph/vertex/param_handler.py +1 -1
  266. langflow/graph/vertex/vertex_types.py +2 -2
  267. langflow/helpers/flow.py +1 -1
  268. langflow/initial_setup/setup.py +32 -30
  269. langflow/initial_setup/starter_projects/Basic Prompt Chaining.json +26 -0
  270. langflow/initial_setup/starter_projects/Basic Prompting.json +26 -0
  271. langflow/initial_setup/starter_projects/Blog Writer.json +58 -2
  272. langflow/initial_setup/starter_projects/Custom Component Generator.json +37 -2
  273. langflow/initial_setup/starter_projects/Document Q&A.json +27 -1
  274. langflow/initial_setup/starter_projects/Financial Report Parser.json +43 -0
  275. langflow/initial_setup/starter_projects/Hybrid Search RAG.json +83 -1
  276. langflow/initial_setup/starter_projects/Image Sentiment Analysis.json +43 -0
  277. langflow/initial_setup/starter_projects/Instagram Copywriter.json +51 -3
  278. langflow/initial_setup/starter_projects/Invoice Summarizer.json +40 -1
  279. langflow/initial_setup/starter_projects/Knowledge Ingestion.json +73 -2
  280. langflow/initial_setup/starter_projects/Knowledge Retrieval.json +63 -0
  281. langflow/initial_setup/starter_projects/Market Research.json +59 -3
  282. langflow/initial_setup/starter_projects/Meeting Summary.json +101 -6
  283. langflow/initial_setup/starter_projects/Memory Chatbot.json +37 -2
  284. langflow/initial_setup/starter_projects/News Aggregator.json +63 -3
  285. langflow/initial_setup/starter_projects/Nvidia Remix.json +69 -4
  286. langflow/initial_setup/starter_projects/Pokédex Agent.json +48 -1
  287. langflow/initial_setup/starter_projects/Portfolio Website Code Generator.json +44 -1
  288. langflow/initial_setup/starter_projects/Price Deal Finder.json +57 -5
  289. langflow/initial_setup/starter_projects/Research Agent.json +42 -3
  290. langflow/initial_setup/starter_projects/Research Translation Loop.json +66 -0
  291. langflow/initial_setup/starter_projects/SEO Keyword Generator.json +17 -0
  292. langflow/initial_setup/starter_projects/SaaS Pricing.json +27 -1
  293. langflow/initial_setup/starter_projects/Search agent.json +40 -1
  294. langflow/initial_setup/starter_projects/Sequential Tasks Agents.json +76 -7
  295. langflow/initial_setup/starter_projects/Simple Agent.json +59 -3
  296. langflow/initial_setup/starter_projects/Social Media Agent.json +77 -1
  297. langflow/initial_setup/starter_projects/Text Sentiment Analysis.json +35 -1
  298. langflow/initial_setup/starter_projects/Travel Planning Agents.json +51 -3
  299. langflow/initial_setup/starter_projects/Twitter Thread Generator.json +80 -0
  300. langflow/initial_setup/starter_projects/Vector Store RAG.json +110 -3
  301. langflow/initial_setup/starter_projects/Youtube Analysis.json +84 -3
  302. langflow/initial_setup/starter_projects/vector_store_rag.py +1 -1
  303. langflow/interface/components.py +23 -22
  304. langflow/interface/initialize/loading.py +5 -5
  305. langflow/interface/run.py +1 -1
  306. langflow/interface/utils.py +1 -1
  307. langflow/io/__init__.py +0 -1
  308. langflow/langflow_launcher.py +1 -1
  309. langflow/load/load.py +2 -7
  310. langflow/logging/__init__.py +0 -1
  311. langflow/logging/logger.py +191 -115
  312. langflow/logging/setup.py +1 -1
  313. langflow/main.py +37 -52
  314. langflow/memory.py +7 -7
  315. langflow/middleware.py +1 -1
  316. langflow/processing/process.py +6 -3
  317. langflow/schema/artifact.py +2 -2
  318. langflow/schema/data.py +10 -2
  319. langflow/schema/dataframe.py +1 -1
  320. langflow/schema/message.py +1 -1
  321. langflow/serialization/serialization.py +1 -1
  322. langflow/services/auth/mcp_encryption.py +104 -0
  323. langflow/services/auth/utils.py +2 -2
  324. langflow/services/cache/disk.py +1 -1
  325. langflow/services/cache/service.py +3 -3
  326. langflow/services/database/models/flow/model.py +2 -7
  327. langflow/services/database/models/transactions/crud.py +2 -2
  328. langflow/services/database/models/user/crud.py +2 -2
  329. langflow/services/database/service.py +8 -8
  330. langflow/services/database/utils.py +6 -5
  331. langflow/services/deps.py +2 -3
  332. langflow/services/factory.py +1 -1
  333. langflow/services/flow/flow_runner.py +7 -12
  334. langflow/services/job_queue/service.py +16 -15
  335. langflow/services/manager.py +3 -4
  336. langflow/services/settings/auth.py +1 -1
  337. langflow/services/settings/base.py +3 -8
  338. langflow/services/settings/feature_flags.py +1 -1
  339. langflow/services/settings/manager.py +1 -1
  340. langflow/services/settings/utils.py +1 -1
  341. langflow/services/socket/__init__.py +0 -1
  342. langflow/services/socket/service.py +3 -3
  343. langflow/services/socket/utils.py +4 -4
  344. langflow/services/state/service.py +1 -2
  345. langflow/services/storage/factory.py +1 -1
  346. langflow/services/storage/local.py +9 -8
  347. langflow/services/storage/s3.py +11 -10
  348. langflow/services/store/service.py +3 -3
  349. langflow/services/store/utils.py +3 -2
  350. langflow/services/task/temp_flow_cleanup.py +7 -7
  351. langflow/services/telemetry/service.py +10 -10
  352. langflow/services/tracing/arize_phoenix.py +2 -2
  353. langflow/services/tracing/langfuse.py +1 -1
  354. langflow/services/tracing/langsmith.py +1 -1
  355. langflow/services/tracing/langwatch.py +1 -1
  356. langflow/services/tracing/opik.py +1 -1
  357. langflow/services/tracing/service.py +25 -6
  358. langflow/services/tracing/traceloop.py +245 -0
  359. langflow/services/utils.py +7 -7
  360. langflow/services/variable/kubernetes.py +3 -3
  361. langflow/services/variable/kubernetes_secrets.py +2 -1
  362. langflow/services/variable/service.py +5 -5
  363. langflow/utils/component_utils.py +9 -6
  364. langflow/utils/util.py +5 -5
  365. langflow/utils/validate.py +3 -3
  366. langflow/utils/voice_utils.py +2 -2
  367. {langflow_base_nightly-0.5.0.dev37.dist-info → langflow_base_nightly-0.5.0.dev39.dist-info}/METADATA +2 -1
  368. {langflow_base_nightly-0.5.0.dev37.dist-info → langflow_base_nightly-0.5.0.dev39.dist-info}/RECORD +393 -374
  369. langflow/components/vectorstores/redis.py +0 -89
  370. langflow/frontend/assets/index-C26RqKWL.js +0 -1
  371. langflow/frontend/assets/index-CqS7zir1.css +0 -1
  372. langflow/frontend/assets/lazyIconImports-t6wEndt1.js +0 -2
  373. /langflow/components/{vectorstores → FAISS}/faiss.py +0 -0
  374. /langflow/components/{vectorstores → cassandra}/cassandra.py +0 -0
  375. /langflow/components/{datastax/cassandra.py → cassandra/cassandra_chat.py} +0 -0
  376. /langflow/components/{vectorstores → cassandra}/cassandra_graph.py +0 -0
  377. /langflow/components/{vectorstores → chroma}/chroma.py +0 -0
  378. /langflow/components/{vectorstores → clickhouse}/clickhouse.py +0 -0
  379. /langflow/components/{vectorstores → couchbase}/couchbase.py +0 -0
  380. /langflow/components/{vectorstores → datastax}/astradb.py +0 -0
  381. /langflow/components/{vectorstores → datastax}/astradb_graph.py +0 -0
  382. /langflow/components/{vectorstores → datastax}/graph_rag.py +0 -0
  383. /langflow/components/{vectorstores → datastax}/hcd.py +0 -0
  384. /langflow/components/{vectorstores → elastic}/elasticsearch.py +0 -0
  385. /langflow/components/{vectorstores → elastic}/opensearch.py +0 -0
  386. /langflow/components/{vectorstores → milvus}/milvus.py +0 -0
  387. /langflow/components/{vectorstores → mongodb}/mongodb_atlas.py +0 -0
  388. /langflow/components/{vectorstores → pgvector}/pgvector.py +0 -0
  389. /langflow/components/{vectorstores → pinecone}/pinecone.py +0 -0
  390. /langflow/components/{vectorstores → qdrant}/qdrant.py +0 -0
  391. /langflow/components/{vectorstores → supabase}/supabase.py +0 -0
  392. /langflow/components/{vectorstores → upstash}/upstash.py +0 -0
  393. /langflow/components/{vectorstores → vectara}/vectara.py +0 -0
  394. /langflow/components/{vectorstores → vectara}/vectara_rag.py +0 -0
  395. /langflow/components/{vectorstores → weaviate}/weaviate.py +0 -0
  396. {langflow_base_nightly-0.5.0.dev37.dist-info → langflow_base_nightly-0.5.0.dev39.dist-info}/WHEEL +0 -0
  397. {langflow_base_nightly-0.5.0.dev37.dist-info → langflow_base_nightly-0.5.0.dev39.dist-info}/entry_points.txt +0 -0
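
The moved-file entries above show the aggregate langflow/components/vectorstores/ package being split into per-provider packages (FAISS, cassandra, chroma, clickhouse, and so on), each gaining its own __init__.py. A minimal sketch of how to check where a provider's module now resolves, assuming only the module paths implied by the moves (class names and re-exports are not confirmed here; whether the old path still resolves depends on the installed version):

# Probe the old and new module locations for the Chroma vector store component.
# Paths are taken from the moved-file entries above; this is illustrative only.
import importlib.util

CANDIDATE_PATHS = (
    "langflow.components.vectorstores.chroma",  # location in 0.5.0.dev37
    "langflow.components.chroma.chroma",        # location after the reorganization
)

for module_path in CANDIDATE_PATHS:
    try:
        spec = importlib.util.find_spec(module_path)
    except ImportError:
        spec = None
    print(f"{module_path}: {'found' if spec else 'not found'}")
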
@@ -1,26 +1,118 @@
+"""Enhanced file component v2 with mypy and ruff compliance."""
+
+from __future__ import annotations
+
 from copy import deepcopy
-from typing import Any
+from enum import Enum
+from typing import TYPE_CHECKING, Any
 
 from langflow.base.data.base_file import BaseFileComponent
 from langflow.base.data.utils import TEXT_FILE_TYPES, parallel_load_data, parse_text_file_to_data
-from langflow.io import BoolInput, FileInput, IntInput, Output
+from langflow.io import (
+    BoolInput,
+    DropdownInput,
+    FileInput,
+    IntInput,
+    MessageTextInput,
+    Output,
+    StrInput,
+)
 from langflow.schema.data import Data
+from langflow.schema.message import Message
+
+if TYPE_CHECKING:
+    from langflow.schema import DataFrame
+
+
+class MockConversionStatus(Enum):
+    """Mock ConversionStatus for fallback compatibility."""
+
+    SUCCESS = "success"
+    FAILURE = "failure"
+
+
+class MockInputFormat(Enum):
+    """Mock InputFormat for fallback compatibility."""
+
+    PDF = "pdf"
+    IMAGE = "image"
+
+
+class MockImageRefMode(Enum):
+    """Mock ImageRefMode for fallback compatibility."""
+
+    PLACEHOLDER = "placeholder"
+    EMBEDDED = "embedded"
+
+
+class DoclingImports:
+    """Container for docling imports with type information."""
+
+    def __init__(
+        self,
+        conversion_status: type[Enum],
+        input_format: type[Enum],
+        document_converter: type,
+        image_ref_mode: type[Enum],
+        strategy: str,
+    ) -> None:
+        self.conversion_status = conversion_status
+        self.input_format = input_format
+        self.document_converter = document_converter
+        self.image_ref_mode = image_ref_mode
+        self.strategy = strategy
 
 
 class FileComponent(BaseFileComponent):
-    """Handles loading and processing of individual or zipped text files.
+    """Enhanced file component v2 that combines standard file loading with optional Docling processing and export.
 
-    This component supports processing multiple valid files within a zip archive,
-    resolving paths, validating file types, and optionally using multithreading for processing.
+    This component supports all features of the standard File component, plus an advanced mode
+    that enables Docling document processing and export to various formats (Markdown, HTML, etc.).
     """
 
     display_name = "File"
-    description = "Loads content from one or more files."
+    description = "Loads content from files with optional advanced document processing and export using Docling."
    documentation: str = "https://docs.langflow.org/components-data#file"
    icon = "file-text"
    name = "File"
 
-    VALID_EXTENSIONS = TEXT_FILE_TYPES
+    # Docling supported formats from original component
+    VALID_EXTENSIONS = [
+        "adoc",
+        "asciidoc",
+        "asc",
+        "bmp",
+        "csv",
+        "dotx",
+        "dotm",
+        "docm",
+        "docx",
+        "htm",
+        "html",
+        "jpeg",
+        "json",
+        "md",
+        "pdf",
+        "png",
+        "potx",
+        "ppsx",
+        "pptm",
+        "potm",
+        "ppsm",
+        "pptx",
+        "tiff",
+        "txt",
+        "xls",
+        "xlsx",
+        "xhtml",
+        "xml",
+        "webp",
+        *TEXT_FILE_TYPES,
+    ]
+
+    # Fixed export settings
+    EXPORT_FORMAT = "Markdown"
+    IMAGE_MODE = "placeholder"
 
     _base_inputs = deepcopy(BaseFileComponent._base_inputs)
 
@@ -31,6 +123,58 @@ class FileComponent(BaseFileComponent):
 
     inputs = [
         *_base_inputs,
+        BoolInput(
+            name="advanced_mode",
+            display_name="Advanced Parser",
+            value=False,
+            real_time_refresh=True,
+            info=(
+                "Enable advanced document processing and export with Docling for PDFs, images, and office documents. "
+                "Available only for single file processing."
+            ),
+            show=False,
+        ),
+        DropdownInput(
+            name="pipeline",
+            display_name="Pipeline",
+            info="Docling pipeline to use",
+            options=["standard", "vlm"],
+            value="standard",
+            advanced=True,
+        ),
+        DropdownInput(
+            name="ocr_engine",
+            display_name="OCR Engine",
+            info="OCR engine to use. Only available when pipeline is set to 'standard'.",
+            options=["", "easyocr"],
+            value="",
+            show=False,
+            advanced=True,
+        ),
+        StrInput(
+            name="md_image_placeholder",
+            display_name="Image placeholder",
+            info="Specify the image placeholder for markdown exports.",
+            value="<!-- image -->",
+            advanced=True,
+            show=False,
+        ),
+        StrInput(
+            name="md_page_break_placeholder",
+            display_name="Page break placeholder",
+            info="Add this placeholder between pages in the markdown output.",
+            value="",
+            advanced=True,
+            show=False,
+        ),
+        MessageTextInput(
+            name="doc_key",
+            display_name="Doc Key",
+            info="The key to use for the DoclingDocument column.",
+            value="doc",
+            advanced=True,
+            show=False,
+        ),
         BoolInput(
             name="use_multithreading",
             display_name="[Deprecated] Use Multithreading",
@@ -45,60 +189,284 @@ class FileComponent(BaseFileComponent):
             info="When multiple files are being processed, the number of files to process concurrently.",
             value=1,
         ),
+        BoolInput(
+            name="markdown",
+            display_name="Markdown Export",
+            info="Export processed documents to Markdown format. Only available when advanced mode is enabled.",
+            value=False,
+            show=False,
+        ),
     ]
 
     outputs = [
         Output(display_name="Raw Content", name="message", method="load_files_message"),
     ]
 
-    def update_outputs(self, frontend_node: dict, field_name: str, field_value: Any) -> dict:
-        """Dynamically show only the relevant output based on the number of files processed."""
+    def _path_value(self, template) -> list[str]:
+        # Get current path value
+        return template.get("path", {}).get("file_path", [])
+
+    def update_build_config(
+        self,
+        build_config: dict[str, Any],
+        field_value: Any,
+        field_name: str | None = None,
+    ) -> dict[str, Any]:
+        """Update build configuration to show/hide fields based on file count and advanced_mode."""
         if field_name == "path":
-            # Add outputs based on the number of files in the path
-            if len(field_value) == 0:
-                return frontend_node
-
-            frontend_node["outputs"] = []
-
-            if len(field_value) == 1:
-                # We need to check if the file is structured content
-                file_path = frontend_node["template"]["path"]["file_path"][0]
-                if file_path.endswith((".csv", ".xlsx", ".parquet")):
-                    frontend_node["outputs"].append(
-                        Output(display_name="Structured Content", name="dataframe", method="load_files_structured"),
-                    )
-                elif file_path.endswith(".json"):
-                    frontend_node["outputs"].append(
-                        Output(display_name="Structured Content", name="json", method="load_files_json"),
-                    )
-
-                # All files get the raw content and path outputs
+            # Get current path value
+            path_value = self._path_value(build_config)
+            file_path = path_value[0] if len(path_value) > 0 else ""
+
+            # Show/hide Advanced Parser based on file count (only for single files)
+            file_count = len(field_value) if field_value else 0
+            if file_count == 1 and not file_path.endswith((".csv", ".xlsx", ".parquet")):
+                build_config["advanced_mode"]["show"] = True
+            else:
+                build_config["advanced_mode"]["show"] = False
+                build_config["advanced_mode"]["value"] = False  # Reset to False when hidden
+
+                # Hide all advanced fields when Advanced Parser is not available
+                advanced_fields = [
+                    "pipeline",
+                    "ocr_engine",
+                    "doc_key",
+                    "md_image_placeholder",
+                    "md_page_break_placeholder",
+                ]
+                for field in advanced_fields:
+                    if field in build_config:
+                        build_config[field]["show"] = False
+
+        elif field_name == "advanced_mode":
+            # Show/hide advanced fields based on advanced_mode (only if single file)
+            advanced_fields = [
+                "pipeline",
+                "ocr_engine",
+                "doc_key",
+                "md_image_placeholder",
+                "md_page_break_placeholder",
+            ]
+
+            for field in advanced_fields:
+                if field in build_config:
+                    build_config[field]["show"] = field_value
+
+        return build_config
+
+    def update_outputs(self, frontend_node: dict[str, Any], field_name: str, field_value: Any) -> dict[str, Any]:  # noqa: ARG002
+        """Dynamically show outputs based on the number of files and their types."""
+        if field_name not in ["path", "advanced_mode"]:
+            return frontend_node
+
+        # Add outputs based on the number of files in the path
+        template = frontend_node.get("template", {})
+        path_value = self._path_value(template)
+        if len(path_value) == 0:
+            return frontend_node
+
+        # Clear existing outputs
+        frontend_node["outputs"] = []
+
+        if len(path_value) == 1:
+            # We need to check if the file is structured content
+            file_path = path_value[0] if field_name == "path" else frontend_node["template"]["path"]["file_path"][0]
+            if file_path.endswith((".csv", ".xlsx", ".parquet")):
                 frontend_node["outputs"].append(
-                    Output(display_name="Raw Content", name="message", method="load_files_message"),
+                    Output(display_name="Structured Content", name="dataframe", method="load_files_structured"),
+                )
+            elif file_path.endswith(".json"):
+                frontend_node["outputs"].append(
+                    Output(display_name="Structured Content", name="json", method="load_files_json"),
+                )
+
+            # Add outputs based on advanced mode
+            advanced_mode = frontend_node.get("template", {}).get("advanced_mode", {}).get("value", False)
+
+            if advanced_mode:
+                # Advanced mode: Structured Output, Markdown, and File Path
+                frontend_node["outputs"].append(
+                    Output(display_name="Structured Output", name="advanced", method="load_files_advanced"),
+                )
+                frontend_node["outputs"].append(
+                    Output(display_name="Markdown", name="markdown", method="load_files_markdown"),
                 )
                 frontend_node["outputs"].append(
                     Output(display_name="File Path", name="path", method="load_files_path"),
                 )
             else:
-                # For multiple files, we only show the files output
+                # Normal mode: Raw Content and File Path
                 frontend_node["outputs"].append(
-                    Output(display_name="Files", name="dataframe", method="load_files"),
+                    Output(display_name="Raw Content", name="message", method="load_files_message"),
                 )
+                frontend_node["outputs"].append(
+                    Output(display_name="File Path", name="path", method="load_files_path"),
+                )
+        else:
+            # For multiple files, we show the files output (DataFrame format)
+            # Advanced Parser is not available for multiple files
+            frontend_node["outputs"].append(
+                Output(display_name="Files", name="dataframe", method="load_files"),
+            )
 
         return frontend_node
 
-    def process_files(self, file_list: list[BaseFileComponent.BaseFile]) -> list[BaseFileComponent.BaseFile]:
-        """Processes files either sequentially or in parallel, depending on concurrency settings.
+    def _try_import_docling(self) -> DoclingImports | None:
+        """Try different import strategies for docling components."""
+        # Try strategy 1: Latest docling structure
+        try:
+            from docling.datamodel.base_models import ConversionStatus, InputFormat  # type: ignore[import-untyped]
+            from docling.document_converter import DocumentConverter  # type: ignore[import-untyped]
+            from docling_core.types.doc import ImageRefMode  # type: ignore[import-untyped]
+
+            self.log("Using latest docling import structure")
+            return DoclingImports(
+                conversion_status=ConversionStatus,
+                input_format=InputFormat,
+                document_converter=DocumentConverter,
+                image_ref_mode=ImageRefMode,
+                strategy="latest",
+            )
+        except ImportError as e:
+            self.log(f"Latest docling structure failed: {e}")
+
+        # Try strategy 2: Alternative import paths
+        try:
+            from docling.document_converter import DocumentConverter  # type: ignore[import-untyped]
+            from docling_core.types.doc import ImageRefMode  # type: ignore[import-untyped]
+
+            # Try to get ConversionStatus from different locations
+            conversion_status: type[Enum] = MockConversionStatus
+            input_format: type[Enum] = MockInputFormat
+
+            try:
+                from docling_core.types import ConversionStatus, InputFormat  # type: ignore[import-untyped]
+
+                conversion_status = ConversionStatus
+                input_format = InputFormat
+            except ImportError:
+                try:
+                    from docling.datamodel import ConversionStatus, InputFormat  # type: ignore[import-untyped]
+
+                    conversion_status = ConversionStatus
+                    input_format = InputFormat
+                except ImportError:
+                    # Use mock enums if we can't find them
+                    pass
+
+            self.log("Using alternative docling import structure")
+            return DoclingImports(
+                conversion_status=conversion_status,
+                input_format=input_format,
+                document_converter=DocumentConverter,
+                image_ref_mode=ImageRefMode,
+                strategy="alternative",
+            )
+        except ImportError as e:
+            self.log(f"Alternative docling structure failed: {e}")
+
+        # Try strategy 3: Basic converter only
+        try:
+            from docling.document_converter import DocumentConverter  # type: ignore[import-untyped]
+
+            self.log("Using basic docling import structure with mocks")
+            return DoclingImports(
+                conversion_status=MockConversionStatus,
+                input_format=MockInputFormat,
+                document_converter=DocumentConverter,
+                image_ref_mode=MockImageRefMode,
+                strategy="basic",
+            )
+        except ImportError as e:
+            self.log(f"Basic docling structure failed: {e}")
+
+        # Strategy 4: Complete fallback - return None to indicate failure
+        return None
+
+    def _create_advanced_converter(self, docling_imports: DoclingImports) -> Any:
+        """Create advanced converter with pipeline options if available."""
+        try:
+            from docling.datamodel.pipeline_options import PdfPipelineOptions  # type: ignore[import-untyped]
+            from docling.document_converter import PdfFormatOption  # type: ignore[import-untyped]
+
+            document_converter = docling_imports.document_converter
+            input_format = docling_imports.input_format
+
+            # Create basic pipeline options
+            pipeline_options = PdfPipelineOptions()
+
+            # Configure OCR if specified and available
+            if self.ocr_engine:
+                try:
+                    from docling.models.factories import get_ocr_factory  # type: ignore[import-untyped]
+
+                    pipeline_options.do_ocr = True
+                    ocr_factory = get_ocr_factory(allow_external_plugins=False)
+                    ocr_options = ocr_factory.create_options(kind=self.ocr_engine)
+                    pipeline_options.ocr_options = ocr_options
+                    self.log(f"Configured OCR with engine: {self.ocr_engine}")
+                except Exception as e:  # noqa: BLE001
+                    self.log(f"Could not configure OCR: {e}, proceeding without OCR")
+                    pipeline_options.do_ocr = False
+
+            # Create format options
+            pdf_format_option = PdfFormatOption(pipeline_options=pipeline_options)
+            format_options = {}
+            if hasattr(input_format, "PDF"):
+                format_options[input_format.PDF] = pdf_format_option
+            if hasattr(input_format, "IMAGE"):
+                format_options[input_format.IMAGE] = pdf_format_option
 
-        Args:
-            file_list (list[BaseFileComponent.BaseFile]): List of files to process.
+            return document_converter(format_options=format_options)
 
-        Returns:
-            list[BaseFileComponent.BaseFile]: Updated list of files with merged data.
-        """
+        except Exception as e:  # noqa: BLE001
+            self.log(f"Could not create advanced converter: {e}, using basic converter")
+            return docling_imports.document_converter()
 
-        def process_file(file_path: str, *, silent_errors: bool = False) -> Data | None:
-            """Processes a single file and returns its Data object."""
+    def _is_docling_compatible(self, file_path: str) -> bool:
+        """Check if file is compatible with Docling processing."""
+        # All VALID_EXTENSIONS are Docling compatible (except for TEXT_FILE_TYPES which may overlap)
+        docling_extensions = [
+            ".adoc",
+            ".asciidoc",
+            ".asc",
+            ".bmp",
+            ".csv",
+            ".dotx",
+            ".dotm",
+            ".docm",
+            ".docx",
+            ".htm",
+            ".html",
+            ".jpeg",
+            ".json",
+            ".md",
+            ".pdf",
+            ".png",
+            ".potx",
+            ".ppsx",
+            ".pptm",
+            ".potm",
+            ".ppsm",
+            ".pptx",
+            ".tiff",
+            ".txt",
+            ".xls",
+            ".xlsx",
+            ".xhtml",
+            ".xml",
+            ".webp",
+        ]
+        return any(file_path.lower().endswith(ext) for ext in docling_extensions)
+
+    def process_files(
+        self,
+        file_list: list[BaseFileComponent.BaseFile],
+    ) -> list[BaseFileComponent.BaseFile]:
+        """Process files using standard parsing or Docling based on advanced_mode and file type."""
+
+        def process_file_standard(file_path: str, *, silent_errors: bool = False) -> Data | None:
+            """Process a single file using standard text parsing."""
             try:
                 return parse_text_file_to_data(file_path, silent_errors=silent_errors)
             except FileNotFoundError as e:
@@ -114,27 +482,179 @@ class FileComponent(BaseFileComponent):
                    raise
            return None
 
+        def process_file_docling(file_path: str, *, silent_errors: bool = False) -> Data | None:
+            """Process a single file using Docling if compatible, otherwise standard processing."""
+            # Try Docling first if file is compatible and advanced mode is enabled
+            try:
+                return self._process_with_docling_and_export(file_path)
+            except Exception as e:  # noqa: BLE001
+                self.log(f"Docling processing failed for {file_path}: {e}, falling back to standard processing")
+                if not silent_errors:
+                    # Return error data instead of raising
+                    return Data(data={"error": f"Docling processing failed: {e}", "file_path": file_path})
+
+            return None
+
         if not file_list:
             msg = "No files to process."
             raise ValueError(msg)
 
+        file_path = str(file_list[0].path)
+        if self.advanced_mode and self._is_docling_compatible(file_path):
+            processed_data = process_file_docling(file_path)
+            if not processed_data:
+                msg = f"Failed to process file with Docling: {file_path}"
+                raise ValueError(msg)
+
+            # Serialize processed data to match Data structure
+            serialized_data = processed_data.serialize_model()
+
+            # Now, if doc is nested, we need to unravel it
+            clean_data: list[Data | None] = [processed_data]
+
+            # This is where we've manually processed the data
+            try:
+                if "exported_content" not in serialized_data:
+                    clean_data = [
+                        Data(
+                            data={
+                                "file_path": file_path,
+                                **(
+                                    item["element"]
+                                    if "element" in item
+                                    else {k: v for k, v in item.items() if k != "file_path"}
+                                ),
+                            }
+                        )
+                        for item in serialized_data["doc"]
+                    ]
+            except Exception as _:  # noqa: BLE001
+                raise ValueError(serialized_data) from None
+
+            # Repeat file_list to match the number of processed data elements
+            final_data: list[Data | None] = clean_data
+            return self.rollup_data(file_list, final_data)
+
         concurrency = 1 if not self.use_multithreading else max(1, self.concurrency_multithreading)
         file_count = len(file_list)
 
-        parallel_processing_threshold = 2
-        if concurrency < parallel_processing_threshold or file_count < parallel_processing_threshold:
-            if file_count > 1:
-                self.log(f"Processing {file_count} files sequentially.")
-            processed_data = [process_file(str(file.path), silent_errors=self.silent_errors) for file in file_list]
-        else:
-            self.log(f"Starting parallel processing of {file_count} files with concurrency: {concurrency}.")
-            file_paths = [str(file.path) for file in file_list]
-            processed_data = parallel_load_data(
-                file_paths,
-                silent_errors=self.silent_errors,
-                load_function=process_file,
-                max_concurrency=concurrency,
+        self.log(f"Starting parallel processing of {file_count} files with concurrency: {concurrency}.")
+        file_paths = [str(file.path) for file in file_list]
+        my_data = parallel_load_data(
+            file_paths,
+            silent_errors=self.silent_errors,
+            load_function=process_file_standard,
+            max_concurrency=concurrency,
+        )
+
+        return self.rollup_data(file_list, my_data)
+
+    def load_files_advanced(self) -> DataFrame:
+        """Load files using advanced Docling processing and export to an advanced format."""
+        # TODO: Update
+        self.markdown = False
+        return self.load_files()
+
+    def load_files_markdown(self) -> Message:
+        """Load files using advanced Docling processing and export to Markdown format."""
+        self.markdown = True
+        result = self.load_files()
+        return Message(text=str(result.text[0]))
+
+    def _process_with_docling_and_export(self, file_path: str) -> Data:
+        """Process a single file with Docling and export to the specified format."""
+        # Import docling components only when needed
+        docling_imports = self._try_import_docling()
+
+        if docling_imports is None:
+            msg = "Docling not available for advanced processing"
+            raise ImportError(msg)
+
+        conversion_status = docling_imports.conversion_status
+        document_converter = docling_imports.document_converter
+        image_ref_mode = docling_imports.image_ref_mode
+
+        try:
+            # Create converter based on strategy and pipeline setting
+            if docling_imports.strategy == "latest" and self.pipeline == "standard":
+                converter = self._create_advanced_converter(docling_imports)
+            else:
+                # Use basic converter for compatibility
+                converter = document_converter()
+                self.log("Using basic DocumentConverter for Docling processing")
+
+            # Process single file
+            result = converter.convert(file_path)
+
+            # Check if conversion was successful
+            success = False
+            if hasattr(result, "status"):
+                if hasattr(conversion_status, "SUCCESS"):
+                    success = result.status == conversion_status.SUCCESS
+                else:
+                    success = str(result.status).lower() == "success"
+            elif hasattr(result, "document"):
+                # If no status but has document, assume success
+                success = result.document is not None
+
+            if not success:
+                return Data(data={"error": "Docling conversion failed", "file_path": file_path})
+
+            if self.markdown:
+                self.log("Exporting document to Markdown format")
+                # Export the document to the specified format
+                exported_content = self._export_document(result.document, image_ref_mode)
+
+                return Data(
+                    text=exported_content,
+                    data={
+                        "exported_content": exported_content,
+                        "export_format": self.EXPORT_FORMAT,
+                        "file_path": file_path,
+                    },
+                )
+
+            return Data(
+                data={
+                    "doc": self.docling_to_dataframe_simple(result.document.export_to_dict()),
+                    "export_format": self.EXPORT_FORMAT,
+                    "file_path": file_path,
+                }
             )
 
-        # Use rollup_basefile_data to merge processed data with BaseFile objects
-        return self.rollup_data(file_list, processed_data)
+        except Exception as e:  # noqa: BLE001
+            return Data(data={"error": f"Docling processing error: {e!s}", "file_path": file_path})
+
+    def docling_to_dataframe_simple(self, doc):
+        """Extract all text elements into a simple DataFrame."""
+        return [
+            {
+                "page_no": text["prov"][0]["page_no"] if text["prov"] else None,
+                "label": text["label"],
+                "text": text["text"],
+                "level": text.get("level", None),  # for headers
+            }
+            for text in doc["texts"]
+        ]
+
+    def _export_document(self, document: Any, image_ref_mode: type[Enum]) -> str:
+        """Export document to Markdown format with placeholder images."""
+        try:
+            image_mode = (
+                image_ref_mode(self.IMAGE_MODE) if hasattr(image_ref_mode, self.IMAGE_MODE) else self.IMAGE_MODE
+            )
+
+            # Always export to Markdown since it's fixed
+            return document.export_to_markdown(
+                image_mode=image_mode,
+                image_placeholder=self.md_image_placeholder,
+                page_break_placeholder=self.md_page_break_placeholder,
+            )
+
+        except Exception as e:  # noqa: BLE001
+            self.log(f"Markdown export failed: {e}, using basic text export")
+            # Fallback to basic text export
+            try:
+                return document.export_to_text()
+            except Exception:  # noqa: BLE001
+                return str(document)
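
The new FileComponent above resolves its Docling dependencies through layered import strategies and falls back to local mock enums when the expected classes are missing. A minimal standalone sketch of that fallback pattern follows; the docling import paths are copied from the diff, whether they resolve depends on the installed docling version, and the mock class is local to the sketch:

# Resolve a ConversionStatus class, preferring real docling classes and
# degrading to a local mock, mirroring FileComponent._try_import_docling above.
from enum import Enum


class MockConversionStatus(Enum):
    SUCCESS = "success"
    FAILURE = "failure"


def resolve_conversion_status() -> type[Enum]:
    try:
        # Strategy 1: import path used by the "latest" strategy in the diff.
        from docling.datamodel.base_models import ConversionStatus
    except ImportError:
        pass
    else:
        return ConversionStatus
    try:
        # Strategy 2: alternative location probed by the diff.
        from docling_core.types import ConversionStatus
    except ImportError:
        # Strategy 3: fall back to the mock so callers can still compare values.
        return MockConversionStatus
    else:
        return ConversionStatus


if __name__ == "__main__":
    status_cls = resolve_conversion_status()
    print(f"Resolved {status_cls.__module__}.{status_cls.__name__}")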