langflow-base-nightly 0.5.0.dev37__py3-none-any.whl → 0.5.0.dev39__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (397)
  1. langflow/__main__.py +1 -1
  2. langflow/alembic/versions/0882f9657f22_encrypt_existing_mcp_auth_settings_.py +122 -0
  3. langflow/alembic/versions/4e5980a44eaa_fix_date_times_again.py +24 -30
  4. langflow/alembic/versions/58b28437a398_modify_nullable.py +6 -6
  5. langflow/alembic/versions/79e675cb6752_change_datetime_type.py +24 -30
  6. langflow/alembic/versions/b2fa308044b5_add_unique_constraints.py +12 -13
  7. langflow/api/build.py +21 -26
  8. langflow/api/health_check_router.py +3 -3
  9. langflow/api/utils.py +3 -3
  10. langflow/api/v1/callback.py +2 -2
  11. langflow/api/v1/chat.py +19 -31
  12. langflow/api/v1/endpoints.py +10 -10
  13. langflow/api/v1/flows.py +1 -1
  14. langflow/api/v1/knowledge_bases.py +3 -3
  15. langflow/api/v1/mcp.py +12 -12
  16. langflow/api/v1/mcp_projects.py +405 -120
  17. langflow/api/v1/mcp_utils.py +8 -8
  18. langflow/api/v1/schemas.py +2 -7
  19. langflow/api/v1/store.py +1 -1
  20. langflow/api/v1/validate.py +2 -2
  21. langflow/api/v1/voice_mode.py +58 -62
  22. langflow/api/v2/files.py +2 -2
  23. langflow/api/v2/mcp.py +10 -9
  24. langflow/base/composio/composio_base.py +21 -2
  25. langflow/base/data/docling_utils.py +194 -0
  26. langflow/base/embeddings/aiml_embeddings.py +1 -1
  27. langflow/base/flow_processing/utils.py +1 -2
  28. langflow/base/io/__init__.py +0 -1
  29. langflow/base/langwatch/utils.py +2 -1
  30. langflow/base/mcp/util.py +49 -47
  31. langflow/base/prompts/api_utils.py +1 -1
  32. langflow/base/tools/flow_tool.py +2 -2
  33. langflow/base/tools/run_flow.py +2 -6
  34. langflow/components/FAISS/__init__.py +34 -0
  35. langflow/components/Notion/add_content_to_page.py +2 -2
  36. langflow/components/Notion/list_database_properties.py +2 -2
  37. langflow/components/Notion/list_pages.py +2 -2
  38. langflow/components/Notion/page_content_viewer.py +2 -2
  39. langflow/components/Notion/update_page_property.py +1 -1
  40. langflow/components/agentql/agentql_api.py +2 -10
  41. langflow/components/agents/agent.py +249 -55
  42. langflow/components/agents/mcp_component.py +14 -14
  43. langflow/components/anthropic/anthropic.py +5 -4
  44. langflow/components/assemblyai/assemblyai_get_subtitles.py +2 -2
  45. langflow/components/assemblyai/assemblyai_lemur.py +2 -2
  46. langflow/components/assemblyai/assemblyai_list_transcripts.py +2 -2
  47. langflow/components/assemblyai/assemblyai_poll_transcript.py +2 -2
  48. langflow/components/assemblyai/assemblyai_start_transcript.py +2 -2
  49. langflow/components/cassandra/__init__.py +40 -0
  50. langflow/components/chroma/__init__.py +34 -0
  51. langflow/components/clickhouse/__init__.py +34 -0
  52. langflow/components/couchbase/__init__.py +34 -0
  53. langflow/components/data/file.py +575 -55
  54. langflow/components/data/url.py +1 -1
  55. langflow/components/datastax/__init__.py +3 -3
  56. langflow/components/datastax/astra_assistant_manager.py +3 -3
  57. langflow/components/datastax/create_assistant.py +1 -2
  58. langflow/components/deactivated/merge_data.py +1 -2
  59. langflow/components/deactivated/sub_flow.py +6 -7
  60. langflow/components/deactivated/vectara_self_query.py +3 -3
  61. langflow/components/docling/__init__.py +0 -198
  62. langflow/components/docling/docling_inline.py +1 -1
  63. langflow/components/elastic/__init__.py +37 -0
  64. langflow/components/embeddings/text_embedder.py +3 -3
  65. langflow/components/firecrawl/firecrawl_extract_api.py +2 -9
  66. langflow/components/google/gmail.py +1 -1
  67. langflow/components/google/google_generative_ai.py +5 -11
  68. langflow/components/groq/groq.py +4 -3
  69. langflow/components/helpers/current_date.py +2 -3
  70. langflow/components/helpers/memory.py +1 -1
  71. langflow/components/ibm/watsonx.py +1 -1
  72. langflow/components/ibm/watsonx_embeddings.py +1 -1
  73. langflow/components/langwatch/langwatch.py +3 -3
  74. langflow/components/logic/flow_tool.py +2 -2
  75. langflow/components/logic/notify.py +1 -1
  76. langflow/components/logic/run_flow.py +2 -3
  77. langflow/components/logic/sub_flow.py +4 -5
  78. langflow/components/mem0/mem0_chat_memory.py +2 -8
  79. langflow/components/milvus/__init__.py +34 -0
  80. langflow/components/mongodb/__init__.py +34 -0
  81. langflow/components/nvidia/nvidia.py +3 -3
  82. langflow/components/olivya/olivya.py +7 -7
  83. langflow/components/ollama/ollama.py +9 -6
  84. langflow/components/perplexity/perplexity.py +3 -13
  85. langflow/components/pgvector/__init__.py +34 -0
  86. langflow/components/pinecone/__init__.py +34 -0
  87. langflow/components/processing/batch_run.py +8 -8
  88. langflow/components/processing/data_operations.py +2 -2
  89. langflow/components/processing/merge_data.py +1 -2
  90. langflow/components/processing/message_to_data.py +2 -3
  91. langflow/components/processing/parse_json_data.py +1 -1
  92. langflow/components/prototypes/python_function.py +2 -3
  93. langflow/components/qdrant/__init__.py +34 -0
  94. langflow/components/redis/__init__.py +36 -2
  95. langflow/components/redis/redis.py +75 -29
  96. langflow/components/redis/redis_chat.py +43 -0
  97. langflow/components/serpapi/serp.py +1 -1
  98. langflow/components/supabase/__init__.py +37 -0
  99. langflow/components/tavily/tavily_extract.py +1 -1
  100. langflow/components/tavily/tavily_search.py +1 -1
  101. langflow/components/tools/calculator.py +2 -2
  102. langflow/components/tools/python_code_structured_tool.py +3 -10
  103. langflow/components/tools/python_repl.py +2 -2
  104. langflow/components/tools/searxng.py +3 -3
  105. langflow/components/tools/serp_api.py +2 -2
  106. langflow/components/tools/tavily_search_tool.py +2 -2
  107. langflow/components/tools/yahoo_finance.py +1 -1
  108. langflow/components/twelvelabs/video_embeddings.py +4 -4
  109. langflow/components/upstash/__init__.py +34 -0
  110. langflow/components/vectara/__init__.py +37 -0
  111. langflow/components/vectorstores/__init__.py +0 -69
  112. langflow/components/vectorstores/local_db.py +2 -1
  113. langflow/components/weaviate/__init__.py +34 -0
  114. langflow/components/yahoosearch/yahoo.py +1 -1
  115. langflow/components/youtube/trending.py +3 -4
  116. langflow/custom/attributes.py +2 -1
  117. langflow/custom/code_parser/code_parser.py +1 -1
  118. langflow/custom/custom_component/base_component.py +1 -1
  119. langflow/custom/custom_component/component.py +16 -2
  120. langflow/custom/dependency_analyzer.py +165 -0
  121. langflow/custom/directory_reader/directory_reader.py +7 -7
  122. langflow/custom/directory_reader/utils.py +1 -2
  123. langflow/custom/utils.py +63 -45
  124. langflow/events/event_manager.py +1 -1
  125. langflow/frontend/assets/{SlackIcon-CnvyOamQ.js → SlackIcon-Cr3Q15Px.js} +1 -1
  126. langflow/frontend/assets/{Wikipedia-nyTEXdr2.js → Wikipedia-GxM5sPdM.js} +1 -1
  127. langflow/frontend/assets/{Wolfram-BYMQkNSq.js → Wolfram-BN3-VOCA.js} +1 -1
  128. langflow/frontend/assets/{index-DZTC5pdT.js → index-28oOcafk.js} +1 -1
  129. langflow/frontend/assets/{index-ChXJpBz4.js → index-2wSXqBtB.js} +1 -1
  130. langflow/frontend/assets/{index-BB15_iOb.js → index-3wW7BClE.js} +1 -1
  131. langflow/frontend/assets/{index-DKHNourL.js → index-6pyH3ZJB.js} +1 -1
  132. langflow/frontend/assets/{index-BvwZfF2i.js → index-AWCSdofD.js} +1 -1
  133. langflow/frontend/assets/{index-Bvxg4_ux.js → index-B2Zgv_xv.js} +1 -1
  134. langflow/frontend/assets/{index-Bd6WtbKA.js → index-B2ptVQGM.js} +1 -1
  135. langflow/frontend/assets/{index-C7QWbnLK.js → index-B3TANVes.js} +1 -1
  136. langflow/frontend/assets/{index-CpvYQ0ug.js → index-B4yCvZKV.js} +1 -1
  137. langflow/frontend/assets/{index-Dg-63Si_.js → index-BC65VuWx.js} +1 -1
  138. langflow/frontend/assets/{index-C6jri9Wm.js → index-BCDSei1q.js} +1 -1
  139. langflow/frontend/assets/{index-OazXJdEl.js → index-BJy50PvP.js} +1 -1
  140. langflow/frontend/assets/{index-CWdkbVsd.js → index-BKseQQ2I.js} +1 -1
  141. langflow/frontend/assets/{index-CaQ_H9ww.js → index-BLTxEeTi.js} +1 -1
  142. langflow/frontend/assets/{index-DGRMNe9n.js → index-BRg1f4Mu.js} +1 -1
  143. langflow/frontend/assets/{index-D8lOi1GI.js → index-BS8Vo8nc.js} +1 -1
  144. langflow/frontend/assets/{index-B748uLP1.js → index-BTKOU4xC.js} +1 -1
  145. langflow/frontend/assets/{index-Dqd4RjYA.js → index-BVwJDmw-.js} +1 -1
  146. langflow/frontend/assets/{index-DbMFlnHE.js → index-BWYuQ2Sj.js} +1 -1
  147. langflow/frontend/assets/{index-BEMw2Np8.js → index-BWdLILDG.js} +1 -1
  148. langflow/frontend/assets/{index-BmX5CoED.js → index-BZcw4827.js} +1 -1
  149. langflow/frontend/assets/{index-CyPvTB63.js → index-Bbi87Ve4.js} +1 -1
  150. langflow/frontend/assets/{index-BTEW9e8P.js → index-Bf0IYKLd.js} +1 -1
  151. langflow/frontend/assets/{index-BZgXW854.js → index-Bg5nrMRh.js} +1 -1
  152. langflow/frontend/assets/{index-BBxAPk1y.js → index-BiC280Nx.js} +1 -1
  153. langflow/frontend/assets/{index-BR0bkVqX.js → index-BiKKN6FR.js} +1 -1
  154. langflow/frontend/assets/{index-CTrt1Q_j.js → index-Bief6eyJ.js} +1 -1
  155. langflow/frontend/assets/{index-D5_DsUJc.js → index-BkXec1Yf.js} +1 -1
  156. langflow/frontend/assets/{index-CZQ9rXNa.js → index-Bnl6QHtP.js} +1 -1
  157. langflow/frontend/assets/{index-BChjg6Az.js → index-BpxbUiZD.js} +1979 -1979
  158. langflow/frontend/assets/{index-BOeo01QB.js → index-BrJV8psX.js} +1 -1
  159. langflow/frontend/assets/{index-DysKpOuj.js → index-BwLWcUXL.js} +1 -1
  160. langflow/frontend/assets/{index-Bnqod3vk.js → index-Bx7dBY26.js} +1 -1
  161. langflow/frontend/assets/{index-D3DDfngy.js → index-C-EdnFdA.js} +1 -1
  162. langflow/frontend/assets/{index-Bsa0xZyL.js → index-C-Xfg4cD.js} +1 -1
  163. langflow/frontend/assets/{index-BTrsh9LS.js → index-C1f2wMat.js} +1 -1
  164. langflow/frontend/assets/index-C1xroOlH.css +1 -0
  165. langflow/frontend/assets/{index-B1YN7oMV.js → index-C3KequvP.js} +1 -1
  166. langflow/frontend/assets/{index-DzW2mfkK.js → index-C3ZjKdCD.js} +1 -1
  167. langflow/frontend/assets/{index-ajRge-Mg.js → index-C3l0zYn0.js} +1 -1
  168. langflow/frontend/assets/{index-cvZdgWHQ.js → index-C3yvArUT.js} +1 -1
  169. langflow/frontend/assets/{index-C-2hghRJ.js → index-C9Cxnkl8.js} +1 -1
  170. langflow/frontend/assets/{index-BhIOhlCH.js → index-CBc8fEAE.js} +1 -1
  171. langflow/frontend/assets/{index-B3Sur4Z3.js → index-CBvrGgID.js} +1 -1
  172. langflow/frontend/assets/{index-CCePCqkT.js → index-CD-PqGCY.js} +1 -1
  173. langflow/frontend/assets/{index-8yMsjVV2.js → index-CGO1CiUr.js} +1 -1
  174. langflow/frontend/assets/{index-DF5VwgU6.js → index-CH5UVA9b.js} +1 -1
  175. langflow/frontend/assets/{index-dcnYpT9N.js → index-CLJeJYjH.js} +1 -1
  176. langflow/frontend/assets/{index-DfxYyS3M.js → index-CMZ79X-Y.js} +1 -1
  177. langflow/frontend/assets/{index-ya2uXE8v.js → index-CMzfJKiW.js} +1 -1
  178. langflow/frontend/assets/{index-DkelbYy7.js → index-CNw1H-Wc.js} +1 -1
  179. langflow/frontend/assets/{index-DytJENYD.js → index-CPHEscq9.js} +1 -1
  180. langflow/frontend/assets/{index-Bv8h2Z-q.js → index-CRPKJZw9.js} +1 -1
  181. langflow/frontend/assets/{index-D-9TI74R.js → index-CRPyCfYy.js} +1 -1
  182. langflow/frontend/assets/{index-BLGYN-9b.js → index-CRcMqCIj.js} +1 -1
  183. langflow/frontend/assets/{index-tVYiABdp.js → index-CUVDws8F.js} +1 -1
  184. langflow/frontend/assets/{index-CpcbQZIF.js → index-CVWQfRYZ.js} +1 -1
  185. langflow/frontend/assets/{index-DPCzHdsC.js → index-CVl6MbaM.js} +1 -1
  186. langflow/frontend/assets/{index-DkXy1WFo.js → index-CVwWoX99.js} +1 -1
  187. langflow/frontend/assets/{index-DK1Ptcc4.js → index-CWPzZtSx.js} +1 -1
  188. langflow/frontend/assets/{index-DHq8TQPB.js → index-CZqRL9DE.js} +1 -1
  189. langflow/frontend/assets/{index-DnEGCgih.js → index-CdIf07Rw.js} +1 -1
  190. langflow/frontend/assets/{index-BIQQCMvz.js → index-Cewy7JZE.js} +1 -1
  191. langflow/frontend/assets/{index-D8GJngXa.js → index-CfwLpbMM.js} +1 -1
  192. langflow/frontend/assets/{index-C_TdzfAn.js → index-CiR1dxI4.js} +1 -1
  193. langflow/frontend/assets/{index-BzL_EoKd.js → index-CiixOzDG.js} +1 -1
  194. langflow/frontend/assets/{index-Boso-xEw.js → index-ClsuDmR6.js} +1 -1
  195. langflow/frontend/assets/{index-8WdfSTTz.js → index-CmEYYRN1.js} +1 -1
  196. langflow/frontend/assets/{index-FUxmznS-.js → index-Co20d-eQ.js} +1 -1
  197. langflow/frontend/assets/{index-C82JjCPD.js → index-CpzXS6md.js} +1 -1
  198. langflow/frontend/assets/{index-DIDDfmlJ.js → index-Cqpzl1J4.js} +1 -1
  199. langflow/frontend/assets/{index-_UcqeEjm.js → index-CtVIONP2.js} +1 -1
  200. langflow/frontend/assets/{index-Gkrq-vzm.js → index-CuFXdTx4.js} +1 -1
  201. langflow/frontend/assets/{index-WPFivmdQ.js → index-Cyd2HtHK.js} +1 -1
  202. langflow/frontend/assets/{index-BFp_O-c9.js → index-D-1tA8Dt.js} +1 -1
  203. langflow/frontend/assets/{index-BqPpO6KG.js → index-D-KY3kkq.js} +1 -1
  204. langflow/frontend/assets/{index-Db71w3lq.js → index-D-_B1a8v.js} +1 -1
  205. langflow/frontend/assets/{index-BIzTEqFh.js → index-D14EWPyZ.js} +1 -1
  206. langflow/frontend/assets/{index-BbJjt5m4.js → index-D2N3l-cw.js} +1 -1
  207. langflow/frontend/assets/{index-DCRk27Tp.js → index-D5ETnvJa.js} +1 -1
  208. langflow/frontend/assets/{index-CvcEzq4x.js → index-D7kquVv2.js} +1 -1
  209. langflow/frontend/assets/{index-Q9vDw0Xl.js → index-DA6-bvgN.js} +1 -1
  210. langflow/frontend/assets/{index-l7bzB8Ex.js → index-DDWBeudF.js} +1 -1
  211. langflow/frontend/assets/{index-BCCGvqay.js → index-DDcMAaG4.js} +1 -1
  212. langflow/frontend/assets/{index-pCQ_yw8m.js → index-DHgomBdh.js} +1 -1
  213. langflow/frontend/assets/{index-BxEuHa76.js → index-DJP-ss47.js} +1 -1
  214. langflow/frontend/assets/{index-BbRm7beF.js → index-DQ7VYqQc.js} +1 -1
  215. langflow/frontend/assets/{index-Car-zdor.js → index-DTqbvGC0.js} +1 -1
  216. langflow/frontend/assets/{index-BRxvproo.js → index-DUpri6zF.js} +1 -1
  217. langflow/frontend/assets/{index-BQ6NUdMY.js → index-DV3utZDZ.js} +1 -1
  218. langflow/frontend/assets/{index-DjQETUy8.js → index-DXRfN4HV.js} +1 -1
  219. langflow/frontend/assets/{index-DfngcQxO.js → index-Db9dYSzy.js} +1 -1
  220. langflow/frontend/assets/{index-rXV1G1aB.js → index-DdtMEn6I.js} +1 -1
  221. langflow/frontend/assets/{index-DmMDPoi0.js → index-DfDhMHgQ.js} +1 -1
  222. langflow/frontend/assets/{index-DJB12jIC.js → index-Dfe7qfvf.js} +1 -1
  223. langflow/frontend/assets/{index-C_veJlEb.js → index-DhtZ5hx8.js} +1 -1
  224. langflow/frontend/assets/{index-CQMoqLAu.js → index-DiB3CTo8.js} +1 -1
  225. langflow/frontend/assets/{index-DVlceYFD.js → index-DiGWASY5.js} +1 -1
  226. langflow/frontend/assets/{index-Du_18NCU.js → index-Dl5amdBz.js} +1 -1
  227. langflow/frontend/assets/{index-CYDAYm-i.js → index-DlD4dXlZ.js} +1 -1
  228. langflow/frontend/assets/{index-CLPdN-q6.js → index-DmeiHnfl.js} +1 -1
  229. langflow/frontend/assets/index-Dmu-X5-4.js +1 -0
  230. langflow/frontend/assets/{index-BzEUlaw_.js → index-DpVWih90.js} +1 -1
  231. langflow/frontend/assets/{index-D6PSjHxP.js → index-DrDrcajG.js} +1 -1
  232. langflow/frontend/assets/{index-Dq5ilsem.js → index-Du-pc0KE.js} +1 -1
  233. langflow/frontend/assets/{index-CYe8Ipef.js → index-DwPkMTaY.js} +1 -1
  234. langflow/frontend/assets/{index-BVEZDXxS.js → index-DwQEZe3C.js} +1 -1
  235. langflow/frontend/assets/{index-BvT7L317.js → index-DyJFTK24.js} +1 -1
  236. langflow/frontend/assets/{index-HK3bVMYA.js → index-J38wh62w.js} +1 -1
  237. langflow/frontend/assets/{index-CCxGSSTT.js → index-Kwdl-e29.js} +1 -1
  238. langflow/frontend/assets/{index-BOB_zsjl.js → index-OwPvCmpW.js} +1 -1
  239. langflow/frontend/assets/{index-Dsps-jKu.js → index-Tw3Os-DN.js} +1 -1
  240. langflow/frontend/assets/{index-CFDvOtKC.js → index-X0guhYF8.js} +1 -1
  241. langflow/frontend/assets/{index-BX5D-USa.js → index-dJWNxIRH.js} +1 -1
  242. langflow/frontend/assets/{index-BRYjyhAd.js → index-dcJ8-agu.js} +1 -1
  243. langflow/frontend/assets/{index-Ui4xUImO.js → index-eo2mAtL-.js} +1 -1
  244. langflow/frontend/assets/{index-CxvP91st.js → index-hG24k5xJ.js} +1 -1
  245. langflow/frontend/assets/{index-CVQmT7ZL.js → index-h_aSZHf3.js} +1 -1
  246. langflow/frontend/assets/{index-BIXaW2aY.js → index-hbndqB9B.js} +1 -1
  247. langflow/frontend/assets/{index-DIkNW9Cd.js → index-iJngutFo.js} +1 -1
  248. langflow/frontend/assets/{index-BWmPX4iQ.js → index-lTpteg8t.js} +1 -1
  249. langflow/frontend/assets/{index-xuIrH2Dq.js → index-lZX9AvZW.js} +1 -1
  250. langflow/frontend/assets/{index-yCHsaqs8.js → index-m8QA6VNM.js} +1 -1
  251. langflow/frontend/assets/{index-BkPYpfgw.js → index-o0D2S7xW.js} +1 -1
  252. langflow/frontend/assets/{index-DpClkXIV.js → index-ovFJ_0J6.js} +1 -1
  253. langflow/frontend/assets/{index-CmplyEaa.js → index-pYJJOcma.js} +1 -1
  254. langflow/frontend/assets/{index-CJo_cyWW.js → index-sI75DsdM.js} +1 -1
  255. langflow/frontend/assets/{index-nVwHLjuV.js → index-xvFOmxx4.js} +1 -1
  256. langflow/frontend/assets/{index-LbYjHKkn.js → index-z3SRY-mX.js} +1 -1
  257. langflow/frontend/assets/lazyIconImports-D97HEZkE.js +2 -0
  258. langflow/frontend/assets/{use-post-add-user-BrBYH9eR.js → use-post-add-user-C0MdTpQ5.js} +1 -1
  259. langflow/frontend/index.html +2 -2
  260. langflow/graph/edge/base.py +2 -3
  261. langflow/graph/graph/base.py +15 -13
  262. langflow/graph/graph/constants.py +3 -0
  263. langflow/graph/utils.py +6 -6
  264. langflow/graph/vertex/base.py +4 -5
  265. langflow/graph/vertex/param_handler.py +1 -1
  266. langflow/graph/vertex/vertex_types.py +2 -2
  267. langflow/helpers/flow.py +1 -1
  268. langflow/initial_setup/setup.py +32 -30
  269. langflow/initial_setup/starter_projects/Basic Prompt Chaining.json +26 -0
  270. langflow/initial_setup/starter_projects/Basic Prompting.json +26 -0
  271. langflow/initial_setup/starter_projects/Blog Writer.json +58 -2
  272. langflow/initial_setup/starter_projects/Custom Component Generator.json +37 -2
  273. langflow/initial_setup/starter_projects/Document Q&A.json +27 -1
  274. langflow/initial_setup/starter_projects/Financial Report Parser.json +43 -0
  275. langflow/initial_setup/starter_projects/Hybrid Search RAG.json +83 -1
  276. langflow/initial_setup/starter_projects/Image Sentiment Analysis.json +43 -0
  277. langflow/initial_setup/starter_projects/Instagram Copywriter.json +51 -3
  278. langflow/initial_setup/starter_projects/Invoice Summarizer.json +40 -1
  279. langflow/initial_setup/starter_projects/Knowledge Ingestion.json +73 -2
  280. langflow/initial_setup/starter_projects/Knowledge Retrieval.json +63 -0
  281. langflow/initial_setup/starter_projects/Market Research.json +59 -3
  282. langflow/initial_setup/starter_projects/Meeting Summary.json +101 -6
  283. langflow/initial_setup/starter_projects/Memory Chatbot.json +37 -2
  284. langflow/initial_setup/starter_projects/News Aggregator.json +63 -3
  285. langflow/initial_setup/starter_projects/Nvidia Remix.json +69 -4
  286. langflow/initial_setup/starter_projects/Pokédex Agent.json +48 -1
  287. langflow/initial_setup/starter_projects/Portfolio Website Code Generator.json +44 -1
  288. langflow/initial_setup/starter_projects/Price Deal Finder.json +57 -5
  289. langflow/initial_setup/starter_projects/Research Agent.json +42 -3
  290. langflow/initial_setup/starter_projects/Research Translation Loop.json +66 -0
  291. langflow/initial_setup/starter_projects/SEO Keyword Generator.json +17 -0
  292. langflow/initial_setup/starter_projects/SaaS Pricing.json +27 -1
  293. langflow/initial_setup/starter_projects/Search agent.json +40 -1
  294. langflow/initial_setup/starter_projects/Sequential Tasks Agents.json +76 -7
  295. langflow/initial_setup/starter_projects/Simple Agent.json +59 -3
  296. langflow/initial_setup/starter_projects/Social Media Agent.json +77 -1
  297. langflow/initial_setup/starter_projects/Text Sentiment Analysis.json +35 -1
  298. langflow/initial_setup/starter_projects/Travel Planning Agents.json +51 -3
  299. langflow/initial_setup/starter_projects/Twitter Thread Generator.json +80 -0
  300. langflow/initial_setup/starter_projects/Vector Store RAG.json +110 -3
  301. langflow/initial_setup/starter_projects/Youtube Analysis.json +84 -3
  302. langflow/initial_setup/starter_projects/vector_store_rag.py +1 -1
  303. langflow/interface/components.py +23 -22
  304. langflow/interface/initialize/loading.py +5 -5
  305. langflow/interface/run.py +1 -1
  306. langflow/interface/utils.py +1 -1
  307. langflow/io/__init__.py +0 -1
  308. langflow/langflow_launcher.py +1 -1
  309. langflow/load/load.py +2 -7
  310. langflow/logging/__init__.py +0 -1
  311. langflow/logging/logger.py +191 -115
  312. langflow/logging/setup.py +1 -1
  313. langflow/main.py +37 -52
  314. langflow/memory.py +7 -7
  315. langflow/middleware.py +1 -1
  316. langflow/processing/process.py +6 -3
  317. langflow/schema/artifact.py +2 -2
  318. langflow/schema/data.py +10 -2
  319. langflow/schema/dataframe.py +1 -1
  320. langflow/schema/message.py +1 -1
  321. langflow/serialization/serialization.py +1 -1
  322. langflow/services/auth/mcp_encryption.py +104 -0
  323. langflow/services/auth/utils.py +2 -2
  324. langflow/services/cache/disk.py +1 -1
  325. langflow/services/cache/service.py +3 -3
  326. langflow/services/database/models/flow/model.py +2 -7
  327. langflow/services/database/models/transactions/crud.py +2 -2
  328. langflow/services/database/models/user/crud.py +2 -2
  329. langflow/services/database/service.py +8 -8
  330. langflow/services/database/utils.py +6 -5
  331. langflow/services/deps.py +2 -3
  332. langflow/services/factory.py +1 -1
  333. langflow/services/flow/flow_runner.py +7 -12
  334. langflow/services/job_queue/service.py +16 -15
  335. langflow/services/manager.py +3 -4
  336. langflow/services/settings/auth.py +1 -1
  337. langflow/services/settings/base.py +3 -8
  338. langflow/services/settings/feature_flags.py +1 -1
  339. langflow/services/settings/manager.py +1 -1
  340. langflow/services/settings/utils.py +1 -1
  341. langflow/services/socket/__init__.py +0 -1
  342. langflow/services/socket/service.py +3 -3
  343. langflow/services/socket/utils.py +4 -4
  344. langflow/services/state/service.py +1 -2
  345. langflow/services/storage/factory.py +1 -1
  346. langflow/services/storage/local.py +9 -8
  347. langflow/services/storage/s3.py +11 -10
  348. langflow/services/store/service.py +3 -3
  349. langflow/services/store/utils.py +3 -2
  350. langflow/services/task/temp_flow_cleanup.py +7 -7
  351. langflow/services/telemetry/service.py +10 -10
  352. langflow/services/tracing/arize_phoenix.py +2 -2
  353. langflow/services/tracing/langfuse.py +1 -1
  354. langflow/services/tracing/langsmith.py +1 -1
  355. langflow/services/tracing/langwatch.py +1 -1
  356. langflow/services/tracing/opik.py +1 -1
  357. langflow/services/tracing/service.py +25 -6
  358. langflow/services/tracing/traceloop.py +245 -0
  359. langflow/services/utils.py +7 -7
  360. langflow/services/variable/kubernetes.py +3 -3
  361. langflow/services/variable/kubernetes_secrets.py +2 -1
  362. langflow/services/variable/service.py +5 -5
  363. langflow/utils/component_utils.py +9 -6
  364. langflow/utils/util.py +5 -5
  365. langflow/utils/validate.py +3 -3
  366. langflow/utils/voice_utils.py +2 -2
  367. {langflow_base_nightly-0.5.0.dev37.dist-info → langflow_base_nightly-0.5.0.dev39.dist-info}/METADATA +2 -1
  368. {langflow_base_nightly-0.5.0.dev37.dist-info → langflow_base_nightly-0.5.0.dev39.dist-info}/RECORD +393 -374
  369. langflow/components/vectorstores/redis.py +0 -89
  370. langflow/frontend/assets/index-C26RqKWL.js +0 -1
  371. langflow/frontend/assets/index-CqS7zir1.css +0 -1
  372. langflow/frontend/assets/lazyIconImports-t6wEndt1.js +0 -2
  373. /langflow/components/{vectorstores → FAISS}/faiss.py +0 -0
  374. /langflow/components/{vectorstores → cassandra}/cassandra.py +0 -0
  375. /langflow/components/{datastax/cassandra.py → cassandra/cassandra_chat.py} +0 -0
  376. /langflow/components/{vectorstores → cassandra}/cassandra_graph.py +0 -0
  377. /langflow/components/{vectorstores → chroma}/chroma.py +0 -0
  378. /langflow/components/{vectorstores → clickhouse}/clickhouse.py +0 -0
  379. /langflow/components/{vectorstores → couchbase}/couchbase.py +0 -0
  380. /langflow/components/{vectorstores → datastax}/astradb.py +0 -0
  381. /langflow/components/{vectorstores → datastax}/astradb_graph.py +0 -0
  382. /langflow/components/{vectorstores → datastax}/graph_rag.py +0 -0
  383. /langflow/components/{vectorstores → datastax}/hcd.py +0 -0
  384. /langflow/components/{vectorstores → elastic}/elasticsearch.py +0 -0
  385. /langflow/components/{vectorstores → elastic}/opensearch.py +0 -0
  386. /langflow/components/{vectorstores → milvus}/milvus.py +0 -0
  387. /langflow/components/{vectorstores → mongodb}/mongodb_atlas.py +0 -0
  388. /langflow/components/{vectorstores → pgvector}/pgvector.py +0 -0
  389. /langflow/components/{vectorstores → pinecone}/pinecone.py +0 -0
  390. /langflow/components/{vectorstores → qdrant}/qdrant.py +0 -0
  391. /langflow/components/{vectorstores → supabase}/supabase.py +0 -0
  392. /langflow/components/{vectorstores → upstash}/upstash.py +0 -0
  393. /langflow/components/{vectorstores → vectara}/vectara.py +0 -0
  394. /langflow/components/{vectorstores → vectara}/vectara_rag.py +0 -0
  395. /langflow/components/{vectorstores → weaviate}/weaviate.py +0 -0
  396. {langflow_base_nightly-0.5.0.dev37.dist-info → langflow_base_nightly-0.5.0.dev39.dist-info}/WHEEL +0 -0
  397. {langflow_base_nightly-0.5.0.dev37.dist-info → langflow_base_nightly-0.5.0.dev39.dist-info}/entry_points.txt +0 -0
@@ -7,8 +7,8 @@ import os
  from typing import TYPE_CHECKING, Any

  import pandas as pd
- from loguru import logger

+ from langflow.logging.logger import logger
  from langflow.schema.data import Data
  from langflow.services.deps import get_storage_service
  from langflow.services.storage.service import StorageService
@@ -7,12 +7,12 @@ from typing import TYPE_CHECKING, Any, cast

  import yaml
  from langchain_core.messages import AIMessage, AIMessageChunk
- from loguru import logger

  from langflow.graph.schema import CHAT_COMPONENTS, RECORDS_COMPONENTS, InterfaceComponentTypes, ResultData
  from langflow.graph.utils import UnbuiltObject, log_vertex_build, rewrite_file_path
  from langflow.graph.vertex.base import Vertex
  from langflow.graph.vertex.exceptions import NoComponentInstanceError
+ from langflow.logging.logger import logger
  from langflow.schema.artifact import ArtifactType
  from langflow.schema.data import Data
  from langflow.schema.message import Message
@@ -427,7 +427,7 @@ class InterfaceVertex(ComponentVertex):
  # Update artifacts with the message
  # and remove the stream_url
  self.finalize_build()
- logger.debug(f"Streamed message: {complete_message}")
+ await logger.adebug(f"Streamed message: {complete_message}")
  # Set the result in the vertex of origin
  edges = self.get_edge_with_target(self.id)
  for edge in edges:
langflow/helpers/flow.py CHANGED
@@ -4,10 +4,10 @@ from typing import TYPE_CHECKING, Any, cast
  from uuid import UUID

  from fastapi import HTTPException
- from loguru import logger
  from pydantic.v1 import BaseModel, Field, create_model
  from sqlmodel import select

+ from langflow.logging.logger import logger
  from langflow.schema.schema import INPUT_FIELD_NAME
  from langflow.services.database.models.flow.model import Flow, FlowRead
  from langflow.services.deps import get_settings_service, session_scope
@@ -19,7 +19,6 @@ import orjson
  import sqlalchemy as sa
  from aiofile import async_open
  from emoji import demojize, purely_emoji
- from loguru import logger
  from sqlalchemy.exc import NoResultFound
  from sqlalchemy.orm import selectinload
  from sqlmodel import col, select
@@ -33,6 +32,7 @@ from langflow.base.constants (
  SKIPPED_FIELD_ATTRIBUTES,
  )
  from langflow.initial_setup.constants import STARTER_FOLDER_DESCRIPTION, STARTER_FOLDER_NAME
+ from langflow.logging.logger import logger
  from langflow.services.auth.utils import create_super_user
  from langflow.services.database.models.flow.model import Flow, FlowCreate
  from langflow.services.database.models.folder.constants import DEFAULT_FOLDER_NAME
@@ -517,7 +517,7 @@ def log_node_changes(node_changes_log) -> None:
  async def load_starter_projects(retries=3, delay=1) -> list[tuple[anyio.Path, dict]]:
  starter_projects = []
  folder = anyio.Path(__file__).parent / "starter_projects"
- logger.debug("Loading starter projects")
+ await logger.adebug("Loading starter projects")
  async for file in folder.glob("*.json"):
  attempt = 0
  while attempt < retries:
@@ -533,7 +533,7 @@ async def load_starter_projects(retries=3, delay=1) -> list[tuple[anyio.Path, di
  msg = f"Error loading starter project {file}: {e}"
  raise ValueError(msg) from e
  await asyncio.sleep(delay) # Wait before retrying
- logger.debug(f"Loaded {len(starter_projects)} starter projects")
+ await logger.adebug(f"Loaded {len(starter_projects)} starter projects")
  return starter_projects

@@ -580,7 +580,7 @@ async def copy_profile_pictures() -> None:
  await dst_file.parent.mkdir(parents=True, exist_ok=True)
  # Offload blocking I/O to a thread
  await asyncio.to_thread(shutil.copy2, str(src_file), str(dst_file))
- logger.debug(f"Copied file '{rel_path}'")
+ await logger.adebug(f"Copied file '{rel_path}'")

  tasks = []
  async for src_file in origin.rglob("*"):
@@ -596,7 +596,7 @@ async def copy_profile_pictures() -> None:
  await asyncio.gather(*tasks)

  except Exception as exc:
- logger.exception("Error copying profile pictures")
+ await logger.aexception("Error copying profile pictures")
  msg = "An error occurred while copying profile pictures."
  raise RuntimeError(msg) from exc

@@ -633,7 +633,7 @@ async def update_project_file(project_path: anyio.Path, project: dict, updated_p
  project["data"] = updated_project_data
  async with async_open(str(project_path), "w", encoding="utf-8") as f:
  await f.write(orjson.dumps(project, option=ORJSON_OPTIONS).decode())
- logger.debug(f"Updated starter project {project['name']} file")
+ await logger.adebug(f"Updated starter project {project['name']} file")


  def update_existing_project(
@@ -734,7 +734,7 @@ async def load_flows_from_directory() -> None:
  if not flows_path:
  return
  if not settings_service.auth_settings.AUTO_LOGIN:
- logger.warning("AUTO_LOGIN is disabled, not loading flows from directory")
+ await logger.awarning("AUTO_LOGIN is disabled, not loading flows from directory")
  return

  async with session_scope() as session:
@@ -749,7 +749,7 @@ async def load_flows_from_directory() -> None:
  for file_path in await asyncio.to_thread(Path(flows_path).iterdir):
  if not await anyio.Path(file_path).is_file() or file_path.suffix != ".json":
  continue
- logger.info(f"Loading flow from file: {file_path.name}")
+ await logger.ainfo(f"Loading flow from file: {file_path.name}")
  async with async_open(str(file_path), "r", encoding="utf-8") as f:
  content = await f.read()
  await upsert_flow_from_file(content, file_path.stem, session, user.id)
@@ -794,7 +794,7 @@ async def load_bundles_from_urls() -> tuple[list[TemporaryDirectory], list[str]]
  if not bundle_urls:
  return [], []
  if not settings_service.auth_settings.AUTO_LOGIN:
- logger.warning("AUTO_LOGIN is disabled, not loading flows from URLs")
+ await logger.awarning("AUTO_LOGIN is disabled, not loading flows from URLs")

  async with session_scope() as session:
  user = await get_user_by_username(session, settings_service.auth_settings.SUPERUSER)
@@ -844,13 +844,13 @@ async def upsert_flow_from_file(file_content: AnyStr, filename: str, session: As
  try:
  flow_id = UUID(flow_id)
  except ValueError:
- logger.error(f"Invalid UUID string: {flow_id}")
+ await logger.aerror(f"Invalid UUID string: {flow_id}")
  return

  existing = await find_existing_flow(session, flow_id, flow_endpoint_name)
  if existing:
- logger.debug(f"Found existing flow: {existing.name}")
- logger.info(f"Updating existing flow: {flow_id} with endpoint name {flow_endpoint_name}")
+ await logger.adebug(f"Found existing flow: {existing.name}")
+ await logger.ainfo(f"Updating existing flow: {flow_id} with endpoint name {flow_endpoint_name}")
  for key, value in flow.items():
  if hasattr(existing, key):
  # flow dict from json and db representation are not 100% the same
@@ -867,12 +867,12 @@ async def upsert_flow_from_file(file_content: AnyStr, filename: str, session: As
  try:
  existing.id = UUID(existing.id)
  except ValueError:
- logger.error(f"Invalid UUID string: {existing.id}")
+ await logger.aerror(f"Invalid UUID string: {existing.id}")
  return

  session.add(existing)
  else:
- logger.info(f"Creating new flow: {flow_id} with endpoint name {flow_endpoint_name}")
+ await logger.ainfo(f"Creating new flow: {flow_id} with endpoint name {flow_endpoint_name}")

  # Assign the newly created flow to the default folder
  folder = await get_or_create_default_folder(session, user_id)
@@ -886,15 +886,15 @@ async def upsert_flow_from_file(file_content: AnyStr, filename: str, session: As

  async def find_existing_flow(session, flow_id, flow_endpoint_name):
  if flow_endpoint_name:
- logger.debug(f"flow_endpoint_name: {flow_endpoint_name}")
+ await logger.adebug(f"flow_endpoint_name: {flow_endpoint_name}")
  stmt = select(Flow).where(Flow.endpoint_name == flow_endpoint_name)
  if existing := (await session.exec(stmt)).first():
- logger.debug(f"Found existing flow by endpoint name: {existing.name}")
+ await logger.adebug(f"Found existing flow by endpoint name: {existing.name}")
  return existing

  stmt = select(Flow).where(Flow.id == flow_id)
  if existing := (await session.exec(stmt)).first():
- logger.debug(f"Found existing flow by id: {flow_id}")
+ await logger.adebug(f"Found existing flow by id: {flow_id}")
  return existing
  return None

@@ -916,7 +916,7 @@ async def create_or_update_starter_projects(all_types_dict: dict) -> None:
  starter_projects = await load_starter_projects()

  if get_settings_service().settings.update_starter_projects:
- logger.debug("Updating starter projects")
+ await logger.adebug("Updating starter projects")
  # 1. Delete all existing starter projects
  successfully_updated_projects = 0
  await delete_starter_projects(session, new_folder.id)
@@ -959,13 +959,13 @@ async def create_or_update_starter_projects(all_types_dict: dict) -> None:
  new_folder_id=new_folder.id,
  )
  except Exception: # noqa: BLE001
- logger.exception(f"Error while creating starter project {project_name}")
+ await logger.aexception(f"Error while creating starter project {project_name}")

  successfully_updated_projects += 1
- logger.debug(f"Successfully updated {successfully_updated_projects} starter projects")
+ await logger.adebug(f"Successfully updated {successfully_updated_projects} starter projects")
  else:
  # Even if we're not updating starter projects, we still need to create any that don't exist
- logger.debug("Creating new starter projects")
+ await logger.adebug("Creating new starter projects")
  successfully_created_projects = 0
  existing_flows = await get_all_flows_similar_to_project(session, new_folder.id)
  existing_flow_names = [existing_flow.name for existing_flow in existing_flows]
@@ -997,9 +997,9 @@ async def create_or_update_starter_projects(all_types_dict: dict) -> None:
  new_folder_id=new_folder.id,
  )
  except Exception: # noqa: BLE001
- logger.exception(f"Error while creating starter project {project_name}")
+ await logger.aexception(f"Error while creating starter project {project_name}")
  successfully_created_projects += 1
- logger.debug(f"Successfully created {successfully_created_projects} starter projects")
+ await logger.adebug(f"Successfully created {successfully_created_projects} starter projects")


  async def initialize_super_user_if_needed() -> None:
@@ -1016,7 +1016,7 @@ async def initialize_super_user_if_needed() -> None:
  super_user = await create_super_user(db=async_session, username=username, password=password)
  await get_variable_service().initialize_user_variables(super_user.id, async_session)
  _ = await get_or_create_default_folder(async_session, super_user.id)
- logger.debug("Super user initialized")
+ await logger.adebug("Super user initialized")


  async def get_or_create_default_folder(session: AsyncSession, user_id: UUID) -> FolderRead:
@@ -1082,22 +1082,24 @@ async def sync_flows_from_fs():
  await session.commit()
  await session.refresh(flow)
  except Exception: # noqa: BLE001
- logger.exception(f"Couldn't update flow {flow.id} in database from path {path}")
+ await logger.aexception(
+ f"Couldn't update flow {flow.id} in database from path {path}"
+ )
  flow_mtimes[flow.id] = new_mtime
  except Exception: # noqa: BLE001
- logger.exception(f"Error while handling flow file {path}")
+ await logger.aexception(f"Error while handling flow file {path}")
  except asyncio.CancelledError:
- logger.debug("Flow sync cancelled")
+ await logger.adebug("Flow sync cancelled")
  break
  except (sa.exc.OperationalError, ValueError) as e:
  if "no active connection" in str(e) or "connection is closed" in str(e):
- logger.debug("Database connection lost, assuming shutdown")
+ await logger.adebug("Database connection lost, assuming shutdown")
  break # Exit gracefully, don't error
  raise # Re-raise if it's a real connection problem
  except Exception: # noqa: BLE001
- logger.exception("Error while syncing flows from database")
+ await logger.aexception("Error while syncing flows from database")
  break

  await asyncio.sleep(fs_flows_polling_interval)
  except asyncio.CancelledError:
- logger.debug("Flow sync task cancelled")
+ await logger.adebug("Flow sync task cancelled")
@@ -363,6 +363,15 @@
  "lf_version": "1.5.0",
  "metadata": {
  "code_hash": "192913db3453",
+ "dependencies": {
+ "dependencies": [
+ {
+ "name": "langflow",
+ "version": null
+ }
+ ],
+ "total_dependencies": 1
+ },
  "module": "langflow.components.input_output.chat.ChatInput"
  },
  "output_types": [],
@@ -664,6 +673,23 @@
  "lf_version": "1.5.0",
  "metadata": {
  "code_hash": "6f74e04e39d5",
+ "dependencies": {
+ "dependencies": [
+ {
+ "name": "orjson",
+ "version": "3.10.15"
+ },
+ {
+ "name": "fastapi",
+ "version": "0.115.13"
+ },
+ {
+ "name": "langflow",
+ "version": null
+ }
+ ],
+ "total_dependencies": 3
+ },
  "module": "langflow.components.input_output.chat_output.ChatOutput"
  },
  "output_types": [],
@@ -118,6 +118,15 @@
  "lf_version": "1.4.2",
  "metadata": {
  "code_hash": "192913db3453",
+ "dependencies": {
+ "dependencies": [
+ {
+ "name": "langflow",
+ "version": null
+ }
+ ],
+ "total_dependencies": 1
+ },
  "module": "langflow.components.input_output.chat.ChatInput"
  },
  "output_types": [],
@@ -616,6 +625,23 @@
  "lf_version": "1.4.2",
  "metadata": {
  "code_hash": "6f74e04e39d5",
+ "dependencies": {
+ "dependencies": [
+ {
+ "name": "orjson",
+ "version": "3.10.15"
+ },
+ {
+ "name": "fastapi",
+ "version": "0.115.13"
+ },
+ {
+ "name": "langflow",
+ "version": null
+ }
+ ],
+ "total_dependencies": 3
+ },
  "module": "langflow.components.input_output.chat_output.ChatOutput"
  },
  "output_types": [],
@@ -353,6 +353,15 @@
  "lf_version": "1.4.2",
  "metadata": {
  "code_hash": "efdcba3771af",
+ "dependencies": {
+ "dependencies": [
+ {
+ "name": "langflow",
+ "version": null
+ }
+ ],
+ "total_dependencies": 1
+ },
  "module": "langflow.components.input_output.text.TextInputComponent"
  },
  "output_types": [],
@@ -469,6 +478,23 @@
  "lf_version": "1.4.2",
  "metadata": {
  "code_hash": "6f74e04e39d5",
+ "dependencies": {
+ "dependencies": [
+ {
+ "name": "orjson",
+ "version": "3.10.15"
+ },
+ {
+ "name": "fastapi",
+ "version": "0.115.13"
+ },
+ {
+ "name": "langflow",
+ "version": null
+ }
+ ],
+ "total_dependencies": 3
+ },
  "module": "langflow.components.input_output.chat_output.ChatOutput"
  },
  "output_types": [],
@@ -792,6 +818,15 @@
  "lf_version": "1.4.2",
  "metadata": {
  "code_hash": "556209520650",
+ "dependencies": {
+ "dependencies": [
+ {
+ "name": "langflow",
+ "version": null
+ }
+ ],
+ "total_dependencies": 1
+ },
  "module": "langflow.components.processing.parser.ParserComponent"
  },
  "minimized": false,
@@ -978,7 +1013,28 @@
  "legacy": false,
  "lf_version": "1.4.2",
  "metadata": {
- "code_hash": "a81817a7f244",
+ "code_hash": "252132357639",
+ "dependencies": {
+ "dependencies": [
+ {
+ "name": "requests",
+ "version": "2.32.4"
+ },
+ {
+ "name": "bs4",
+ "version": "4.12.3"
+ },
+ {
+ "name": "langchain_community",
+ "version": "0.3.21"
+ },
+ {
+ "name": "langflow",
+ "version": null
+ }
+ ],
+ "total_dependencies": 4
+ },
  "module": "langflow.components.data.url.URLComponent"
  },
  "minimized": false,
@@ -1069,7 +1125,7 @@
  "show": true,
  "title_case": false,
  "type": "code",
- "value": "import re\n\nimport requests\nfrom bs4 import BeautifulSoup\nfrom langchain_community.document_loaders import RecursiveUrlLoader\nfrom loguru import logger\n\nfrom langflow.custom.custom_component.component import Component\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.helpers.data import safe_convert\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, Output, SliderInput, TableInput\nfrom langflow.schema.dataframe import DataFrame\nfrom langflow.schema.message import Message\nfrom langflow.services.deps import get_settings_service\n\n# Constants\nDEFAULT_TIMEOUT = 30\nDEFAULT_MAX_DEPTH = 1\nDEFAULT_FORMAT = \"Text\"\nURL_REGEX = re.compile(\n r\"^(https?:\\/\\/)?\" r\"(www\\.)?\" r\"([a-zA-Z0-9.-]+)\" r\"(\\.[a-zA-Z]{2,})?\" r\"(:\\d+)?\" r\"(\\/[^\\s]*)?$\",\n re.IGNORECASE,\n)\n\n\nclass URLComponent(Component):\n \"\"\"A component that loads and parses content from web pages recursively.\n\n This component allows fetching content from one or more URLs, with options to:\n - Control crawl depth\n - Prevent crawling outside the root domain\n - Use async loading for better performance\n - Extract either raw HTML or clean text\n - Configure request headers and timeouts\n \"\"\"\n\n display_name = \"URL\"\n description = \"Fetch content from one or more web pages, following links recursively.\"\n documentation: str = \"https://docs.langflow.org/components-data#url\"\n icon = \"layout-template\"\n name = \"URLComponent\"\n\n inputs = [\n MessageTextInput(\n name=\"urls\",\n display_name=\"URLs\",\n info=\"Enter one or more URLs to crawl recursively, by clicking the '+' button.\",\n is_list=True,\n tool_mode=True,\n placeholder=\"Enter a URL...\",\n list_add_label=\"Add URL\",\n input_types=[],\n ),\n SliderInput(\n name=\"max_depth\",\n display_name=\"Depth\",\n info=(\n \"Controls how many 'clicks' away from the initial page the crawler will go:\\n\"\n \"- depth 1: only the initial page\\n\"\n \"- depth 2: initial page + all pages linked directly from it\\n\"\n \"- depth 3: initial page + direct links + links found on those direct link pages\\n\"\n \"Note: This is about link traversal, not URL path depth.\"\n ),\n value=DEFAULT_MAX_DEPTH,\n range_spec=RangeSpec(min=1, max=5, step=1),\n required=False,\n min_label=\" \",\n max_label=\" \",\n min_label_icon=\"None\",\n max_label_icon=\"None\",\n # slider_input=True\n ),\n BoolInput(\n name=\"prevent_outside\",\n display_name=\"Prevent Outside\",\n info=(\n \"If enabled, only crawls URLs within the same domain as the root URL. \"\n \"This helps prevent the crawler from going to external websites.\"\n ),\n value=True,\n required=False,\n advanced=True,\n ),\n BoolInput(\n name=\"use_async\",\n display_name=\"Use Async\",\n info=(\n \"If enabled, uses asynchronous loading which can be significantly faster \"\n \"but might use more system resources.\"\n ),\n value=True,\n required=False,\n advanced=True,\n ),\n DropdownInput(\n name=\"format\",\n display_name=\"Output Format\",\n info=\"Output Format. 
Use 'Text' to extract the text from the HTML or 'HTML' for the raw HTML content.\",\n options=[\"Text\", \"HTML\"],\n value=DEFAULT_FORMAT,\n advanced=True,\n ),\n IntInput(\n name=\"timeout\",\n display_name=\"Timeout\",\n info=\"Timeout for the request in seconds.\",\n value=DEFAULT_TIMEOUT,\n required=False,\n advanced=True,\n ),\n TableInput(\n name=\"headers\",\n display_name=\"Headers\",\n info=\"The headers to send with the request\",\n table_schema=[\n {\n \"name\": \"key\",\n \"display_name\": \"Header\",\n \"type\": \"str\",\n \"description\": \"Header name\",\n },\n {\n \"name\": \"value\",\n \"display_name\": \"Value\",\n \"type\": \"str\",\n \"description\": \"Header value\",\n },\n ],\n value=[{\"key\": \"User-Agent\", \"value\": get_settings_service().settings.user_agent}],\n advanced=True,\n input_types=[\"DataFrame\"],\n ),\n BoolInput(\n name=\"filter_text_html\",\n display_name=\"Filter Text/HTML\",\n info=\"If enabled, filters out text/css content type from the results.\",\n value=True,\n required=False,\n advanced=True,\n ),\n BoolInput(\n name=\"continue_on_failure\",\n display_name=\"Continue on Failure\",\n info=\"If enabled, continues crawling even if some requests fail.\",\n value=True,\n required=False,\n advanced=True,\n ),\n BoolInput(\n name=\"check_response_status\",\n display_name=\"Check Response Status\",\n info=\"If enabled, checks the response status of the request.\",\n value=False,\n required=False,\n advanced=True,\n ),\n BoolInput(\n name=\"autoset_encoding\",\n display_name=\"Autoset Encoding\",\n info=\"If enabled, automatically sets the encoding of the request.\",\n value=True,\n required=False,\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Extracted Pages\", name=\"page_results\", method=\"fetch_content\"),\n Output(display_name=\"Raw Content\", name=\"raw_results\", method=\"fetch_content_as_message\", tool_mode=False),\n ]\n\n @staticmethod\n def validate_url(url: str) -> bool:\n \"\"\"Validates if the given string matches URL pattern.\n\n Args:\n url: The URL string to validate\n\n Returns:\n bool: True if the URL is valid, False otherwise\n \"\"\"\n return bool(URL_REGEX.match(url))\n\n def ensure_url(self, url: str) -> str:\n \"\"\"Ensures the given string is a valid URL.\n\n Args:\n url: The URL string to validate and normalize\n\n Returns:\n str: The normalized URL\n\n Raises:\n ValueError: If the URL is invalid\n \"\"\"\n url = url.strip()\n if not url.startswith((\"http://\", \"https://\")):\n url = \"https://\" + url\n\n if not self.validate_url(url):\n msg = f\"Invalid URL: {url}\"\n raise ValueError(msg)\n\n return url\n\n def _create_loader(self, url: str) -> RecursiveUrlLoader:\n \"\"\"Creates a RecursiveUrlLoader instance with the configured settings.\n\n Args:\n url: The URL to load\n\n Returns:\n RecursiveUrlLoader: Configured loader instance\n \"\"\"\n headers_dict = {header[\"key\"]: header[\"value\"] for header in self.headers}\n extractor = (lambda x: x) if self.format == \"HTML\" else (lambda x: BeautifulSoup(x, \"lxml\").get_text())\n\n return RecursiveUrlLoader(\n url=url,\n max_depth=self.max_depth,\n prevent_outside=self.prevent_outside,\n use_async=self.use_async,\n extractor=extractor,\n timeout=self.timeout,\n headers=headers_dict,\n check_response_status=self.check_response_status,\n continue_on_failure=self.continue_on_failure,\n base_url=url, # Add base_url to ensure consistent domain crawling\n autoset_encoding=self.autoset_encoding, # Enable automatic encoding detection\n exclude_dirs=[], # 
Allow customization of excluded directories\n link_regex=None, # Allow customization of link filtering\n )\n\n def fetch_url_contents(self) -> list[dict]:\n \"\"\"Load documents from the configured URLs.\n\n Returns:\n List[Data]: List of Data objects containing the fetched content\n\n Raises:\n ValueError: If no valid URLs are provided or if there's an error loading documents\n \"\"\"\n try:\n urls = list({self.ensure_url(url) for url in self.urls if url.strip()})\n logger.debug(f\"URLs: {urls}\")\n if not urls:\n msg = \"No valid URLs provided.\"\n raise ValueError(msg)\n\n all_docs = []\n for url in urls:\n logger.debug(f\"Loading documents from {url}\")\n\n try:\n loader = self._create_loader(url)\n docs = loader.load()\n\n if not docs:\n logger.warning(f\"No documents found for {url}\")\n continue\n\n logger.debug(f\"Found {len(docs)} documents from {url}\")\n all_docs.extend(docs)\n\n except requests.exceptions.RequestException as e:\n logger.exception(f\"Error loading documents from {url}: {e}\")\n continue\n\n if not all_docs:\n msg = \"No documents were successfully loaded from any URL\"\n raise ValueError(msg)\n\n # data = [Data(text=doc.page_content, **doc.metadata) for doc in all_docs]\n data = [\n {\n \"text\": safe_convert(doc.page_content, clean_data=True),\n \"url\": doc.metadata.get(\"source\", \"\"),\n \"title\": doc.metadata.get(\"title\", \"\"),\n \"description\": doc.metadata.get(\"description\", \"\"),\n \"content_type\": doc.metadata.get(\"content_type\", \"\"),\n \"language\": doc.metadata.get(\"language\", \"\"),\n }\n for doc in all_docs\n ]\n except Exception as e:\n error_msg = e.message if hasattr(e, \"message\") else e\n msg = f\"Error loading documents: {error_msg!s}\"\n logger.exception(msg)\n raise ValueError(msg) from e\n return data\n\n def fetch_content(self) -> DataFrame:\n \"\"\"Convert the documents to a DataFrame.\"\"\"\n return DataFrame(data=self.fetch_url_contents())\n\n def fetch_content_as_message(self) -> Message:\n \"\"\"Convert the documents to a Message.\"\"\"\n url_contents = self.fetch_url_contents()\n return Message(text=\"\\n\\n\".join([x[\"text\"] for x in url_contents]), data={\"data\": url_contents})\n"
+ "value": "import re\n\nimport requests\nfrom bs4 import BeautifulSoup\nfrom langchain_community.document_loaders import RecursiveUrlLoader\n\nfrom langflow.custom.custom_component.component import Component\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.helpers.data import safe_convert\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, Output, SliderInput, TableInput\nfrom langflow.logging.logger import logger\nfrom langflow.schema.dataframe import DataFrame\nfrom langflow.schema.message import Message\nfrom langflow.services.deps import get_settings_service\n\n# Constants\nDEFAULT_TIMEOUT = 30\nDEFAULT_MAX_DEPTH = 1\nDEFAULT_FORMAT = \"Text\"\nURL_REGEX = re.compile(\n r\"^(https?:\\/\\/)?\" r\"(www\\.)?\" r\"([a-zA-Z0-9.-]+)\" r\"(\\.[a-zA-Z]{2,})?\" r\"(:\\d+)?\" r\"(\\/[^\\s]*)?$\",\n re.IGNORECASE,\n)\n\n\nclass URLComponent(Component):\n \"\"\"A component that loads and parses content from web pages recursively.\n\n This component allows fetching content from one or more URLs, with options to:\n - Control crawl depth\n - Prevent crawling outside the root domain\n - Use async loading for better performance\n - Extract either raw HTML or clean text\n - Configure request headers and timeouts\n \"\"\"\n\n display_name = \"URL\"\n description = \"Fetch content from one or more web pages, following links recursively.\"\n documentation: str = \"https://docs.langflow.org/components-data#url\"\n icon = \"layout-template\"\n name = \"URLComponent\"\n\n inputs = [\n MessageTextInput(\n name=\"urls\",\n display_name=\"URLs\",\n info=\"Enter one or more URLs to crawl recursively, by clicking the '+' button.\",\n is_list=True,\n tool_mode=True,\n placeholder=\"Enter a URL...\",\n list_add_label=\"Add URL\",\n input_types=[],\n ),\n SliderInput(\n name=\"max_depth\",\n display_name=\"Depth\",\n info=(\n \"Controls how many 'clicks' away from the initial page the crawler will go:\\n\"\n \"- depth 1: only the initial page\\n\"\n \"- depth 2: initial page + all pages linked directly from it\\n\"\n \"- depth 3: initial page + direct links + links found on those direct link pages\\n\"\n \"Note: This is about link traversal, not URL path depth.\"\n ),\n value=DEFAULT_MAX_DEPTH,\n range_spec=RangeSpec(min=1, max=5, step=1),\n required=False,\n min_label=\" \",\n max_label=\" \",\n min_label_icon=\"None\",\n max_label_icon=\"None\",\n # slider_input=True\n ),\n BoolInput(\n name=\"prevent_outside\",\n display_name=\"Prevent Outside\",\n info=(\n \"If enabled, only crawls URLs within the same domain as the root URL. \"\n \"This helps prevent the crawler from going to external websites.\"\n ),\n value=True,\n required=False,\n advanced=True,\n ),\n BoolInput(\n name=\"use_async\",\n display_name=\"Use Async\",\n info=(\n \"If enabled, uses asynchronous loading which can be significantly faster \"\n \"but might use more system resources.\"\n ),\n value=True,\n required=False,\n advanced=True,\n ),\n DropdownInput(\n name=\"format\",\n display_name=\"Output Format\",\n info=\"Output Format. 
Use 'Text' to extract the text from the HTML or 'HTML' for the raw HTML content.\",\n options=[\"Text\", \"HTML\"],\n value=DEFAULT_FORMAT,\n advanced=True,\n ),\n IntInput(\n name=\"timeout\",\n display_name=\"Timeout\",\n info=\"Timeout for the request in seconds.\",\n value=DEFAULT_TIMEOUT,\n required=False,\n advanced=True,\n ),\n TableInput(\n name=\"headers\",\n display_name=\"Headers\",\n info=\"The headers to send with the request\",\n table_schema=[\n {\n \"name\": \"key\",\n \"display_name\": \"Header\",\n \"type\": \"str\",\n \"description\": \"Header name\",\n },\n {\n \"name\": \"value\",\n \"display_name\": \"Value\",\n \"type\": \"str\",\n \"description\": \"Header value\",\n },\n ],\n value=[{\"key\": \"User-Agent\", \"value\": get_settings_service().settings.user_agent}],\n advanced=True,\n input_types=[\"DataFrame\"],\n ),\n BoolInput(\n name=\"filter_text_html\",\n display_name=\"Filter Text/HTML\",\n info=\"If enabled, filters out text/css content type from the results.\",\n value=True,\n required=False,\n advanced=True,\n ),\n BoolInput(\n name=\"continue_on_failure\",\n display_name=\"Continue on Failure\",\n info=\"If enabled, continues crawling even if some requests fail.\",\n value=True,\n required=False,\n advanced=True,\n ),\n BoolInput(\n name=\"check_response_status\",\n display_name=\"Check Response Status\",\n info=\"If enabled, checks the response status of the request.\",\n value=False,\n required=False,\n advanced=True,\n ),\n BoolInput(\n name=\"autoset_encoding\",\n display_name=\"Autoset Encoding\",\n info=\"If enabled, automatically sets the encoding of the request.\",\n value=True,\n required=False,\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Extracted Pages\", name=\"page_results\", method=\"fetch_content\"),\n Output(display_name=\"Raw Content\", name=\"raw_results\", method=\"fetch_content_as_message\", tool_mode=False),\n ]\n\n @staticmethod\n def validate_url(url: str) -> bool:\n \"\"\"Validates if the given string matches URL pattern.\n\n Args:\n url: The URL string to validate\n\n Returns:\n bool: True if the URL is valid, False otherwise\n \"\"\"\n return bool(URL_REGEX.match(url))\n\n def ensure_url(self, url: str) -> str:\n \"\"\"Ensures the given string is a valid URL.\n\n Args:\n url: The URL string to validate and normalize\n\n Returns:\n str: The normalized URL\n\n Raises:\n ValueError: If the URL is invalid\n \"\"\"\n url = url.strip()\n if not url.startswith((\"http://\", \"https://\")):\n url = \"https://\" + url\n\n if not self.validate_url(url):\n msg = f\"Invalid URL: {url}\"\n raise ValueError(msg)\n\n return url\n\n def _create_loader(self, url: str) -> RecursiveUrlLoader:\n \"\"\"Creates a RecursiveUrlLoader instance with the configured settings.\n\n Args:\n url: The URL to load\n\n Returns:\n RecursiveUrlLoader: Configured loader instance\n \"\"\"\n headers_dict = {header[\"key\"]: header[\"value\"] for header in self.headers}\n extractor = (lambda x: x) if self.format == \"HTML\" else (lambda x: BeautifulSoup(x, \"lxml\").get_text())\n\n return RecursiveUrlLoader(\n url=url,\n max_depth=self.max_depth,\n prevent_outside=self.prevent_outside,\n use_async=self.use_async,\n extractor=extractor,\n timeout=self.timeout,\n headers=headers_dict,\n check_response_status=self.check_response_status,\n continue_on_failure=self.continue_on_failure,\n base_url=url, # Add base_url to ensure consistent domain crawling\n autoset_encoding=self.autoset_encoding, # Enable automatic encoding detection\n exclude_dirs=[], # 
Allow customization of excluded directories\n link_regex=None, # Allow customization of link filtering\n )\n\n def fetch_url_contents(self) -> list[dict]:\n \"\"\"Load documents from the configured URLs.\n\n Returns:\n List[Data]: List of Data objects containing the fetched content\n\n Raises:\n ValueError: If no valid URLs are provided or if there's an error loading documents\n \"\"\"\n try:\n urls = list({self.ensure_url(url) for url in self.urls if url.strip()})\n logger.debug(f\"URLs: {urls}\")\n if not urls:\n msg = \"No valid URLs provided.\"\n raise ValueError(msg)\n\n all_docs = []\n for url in urls:\n logger.debug(f\"Loading documents from {url}\")\n\n try:\n loader = self._create_loader(url)\n docs = loader.load()\n\n if not docs:\n logger.warning(f\"No documents found for {url}\")\n continue\n\n logger.debug(f\"Found {len(docs)} documents from {url}\")\n all_docs.extend(docs)\n\n except requests.exceptions.RequestException as e:\n logger.exception(f\"Error loading documents from {url}: {e}\")\n continue\n\n if not all_docs:\n msg = \"No documents were successfully loaded from any URL\"\n raise ValueError(msg)\n\n # data = [Data(text=doc.page_content, **doc.metadata) for doc in all_docs]\n data = [\n {\n \"text\": safe_convert(doc.page_content, clean_data=True),\n \"url\": doc.metadata.get(\"source\", \"\"),\n \"title\": doc.metadata.get(\"title\", \"\"),\n \"description\": doc.metadata.get(\"description\", \"\"),\n \"content_type\": doc.metadata.get(\"content_type\", \"\"),\n \"language\": doc.metadata.get(\"language\", \"\"),\n }\n for doc in all_docs\n ]\n except Exception as e:\n error_msg = e.message if hasattr(e, \"message\") else e\n msg = f\"Error loading documents: {error_msg!s}\"\n logger.exception(msg)\n raise ValueError(msg) from e\n return data\n\n def fetch_content(self) -> DataFrame:\n \"\"\"Convert the documents to a DataFrame.\"\"\"\n return DataFrame(data=self.fetch_url_contents())\n\n def fetch_content_as_message(self) -> Message:\n \"\"\"Convert the documents to a Message.\"\"\"\n url_contents = self.fetch_url_contents()\n return Message(text=\"\\n\\n\".join([x[\"text\"] for x in url_contents]), data={\"data\": url_contents})\n"
  },
  "continue_on_failure": {
  "_input_type": "BoolInput",
@@ -237,7 +237,16 @@
  "legacy": false,
  "lf_version": "1.4.3",
  "metadata": {
- "code_hash": "5ca89b168f3f",
+ "code_hash": "464cc8b8fdd2",
+ "dependencies": {
+ "dependencies": [
+ {
+ "name": "langflow",
+ "version": null
+ }
+ ],
+ "total_dependencies": 1
+ },
  "module": "langflow.components.helpers.memory.MemoryComponent"
  },
  "output_types": [],
@@ -290,7 +299,7 @@
  "show": true,
  "title_case": false,
  "type": "code",
- "value": "from typing import Any, cast\n\nfrom langflow.custom.custom_component.component import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs.inputs import DropdownInput, HandleInput, IntInput, MessageTextInput, MultilineInput, TabInput\nfrom langflow.memory import aget_messages, astore_message\nfrom langflow.schema.data import Data\nfrom langflow.schema.dataframe import DataFrame\nfrom langflow.schema.dotdict import dotdict\nfrom langflow.schema.message import Message\nfrom langflow.template.field.base import Output\nfrom langflow.utils.component_utils import set_current_fields, set_field_display\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass MemoryComponent(Component):\n display_name = \"Message History\"\n description = \"Stores or retrieves stored chat messages from Langflow tables or an external memory.\"\n documentation: str = \"https://docs.langflow.org/components-helpers#message-history\"\n icon = \"message-square-more\"\n name = \"Memory\"\n default_keys = [\"mode\", \"memory\"]\n mode_config = {\n \"Store\": [\"message\", \"memory\", \"sender\", \"sender_name\", \"session_id\"],\n \"Retrieve\": [\"n_messages\", \"order\", \"template\", \"memory\"],\n }\n\n inputs = [\n TabInput(\n name=\"mode\",\n display_name=\"Mode\",\n options=[\"Retrieve\", \"Store\"],\n value=\"Retrieve\",\n info=\"Operation mode: Store messages or Retrieve messages.\",\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"message\",\n display_name=\"Message\",\n info=\"The chat message to be stored.\",\n tool_mode=True,\n dynamic=True,\n show=False,\n ),\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"Memory\"],\n info=\"Retrieve messages from an external memory. If empty, it will use the Langflow tables.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"sender_type\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, \"Machine and User\"],\n value=\"Machine and User\",\n info=\"Filter by sender type.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender\",\n display_name=\"Sender\",\n info=\"The sender of the message. Might be Machine or User. \"\n \"If empty, the current sender parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Filter by sender name.\",\n advanced=True,\n show=False,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Messages\",\n value=100,\n info=\"Number of messages to retrieve.\",\n advanced=True,\n show=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n value=\"\",\n advanced=True,\n ),\n DropdownInput(\n name=\"order\",\n display_name=\"Order\",\n options=[\"Ascending\", \"Descending\"],\n value=\"Ascending\",\n info=\"Order of the messages.\",\n advanced=True,\n tool_mode=True,\n required=True,\n ),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
\"\n \"It can contain the keys {text}, {sender} or any other key in the message data.\",\n value=\"{sender_name}: {text}\",\n advanced=True,\n show=False,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Message\", name=\"messages_text\", method=\"retrieve_messages_as_text\", dynamic=True),\n Output(display_name=\"Dataframe\", name=\"dataframe\", method=\"retrieve_messages_dataframe\", dynamic=True),\n ]\n\n def update_outputs(self, frontend_node: dict, field_name: str, field_value: Any) -> dict:\n \"\"\"Dynamically show only the relevant output based on the selected output type.\"\"\"\n if field_name == \"mode\":\n # Start with empty outputs\n frontend_node[\"outputs\"] = []\n if field_value == \"Store\":\n frontend_node[\"outputs\"] = [\n Output(\n display_name=\"Stored Messages\",\n name=\"stored_messages\",\n method=\"store_message\",\n hidden=True,\n dynamic=True,\n )\n ]\n if field_value == \"Retrieve\":\n frontend_node[\"outputs\"] = [\n Output(\n display_name=\"Messages\", name=\"messages_text\", method=\"retrieve_messages_as_text\", dynamic=True\n ),\n Output(\n display_name=\"Dataframe\", name=\"dataframe\", method=\"retrieve_messages_dataframe\", dynamic=True\n ),\n ]\n return frontend_node\n\n async def store_message(self) -> Message:\n message = Message(text=self.message) if isinstance(self.message, str) else self.message\n\n message.session_id = self.session_id or message.session_id\n message.sender = self.sender or message.sender or MESSAGE_SENDER_AI\n message.sender_name = self.sender_name or message.sender_name or MESSAGE_SENDER_NAME_AI\n\n stored_messages: list[Message] = []\n\n if self.memory:\n self.memory.session_id = message.session_id\n lc_message = message.to_lc_message()\n await self.memory.aadd_messages([lc_message])\n\n stored_messages = await self.memory.aget_messages() or []\n\n stored_messages = [Message.from_lc_message(m) for m in stored_messages] if stored_messages else []\n\n if message.sender:\n stored_messages = [m for m in stored_messages if m.sender == message.sender]\n else:\n await astore_message(message, flow_id=self.graph.flow_id)\n stored_messages = (\n await aget_messages(\n session_id=message.session_id, sender_name=message.sender_name, sender=message.sender\n )\n or []\n )\n\n if not stored_messages:\n msg = \"No messages were stored. 
Please ensure that the session ID and sender are properly set.\"\n raise ValueError(msg)\n\n stored_message = stored_messages[0]\n self.status = stored_message\n return stored_message\n\n async def retrieve_messages(self) -> Data:\n sender_type = self.sender_type\n sender_name = self.sender_name\n session_id = self.session_id\n n_messages = self.n_messages\n order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n if sender_type == \"Machine and User\":\n sender_type = None\n\n if self.memory and not hasattr(self.memory, \"aget_messages\"):\n memory_name = type(self.memory).__name__\n err_msg = f\"External Memory object ({memory_name}) must have 'aget_messages' method.\"\n raise AttributeError(err_msg)\n # Check if n_messages is None or 0\n if n_messages == 0:\n stored = []\n elif self.memory:\n # override session_id\n self.memory.session_id = session_id\n\n stored = await self.memory.aget_messages()\n # langchain memories are supposed to return messages in ascending order\n\n if order == \"DESC\":\n stored = stored[::-1]\n if n_messages:\n stored = stored[-n_messages:] if order == \"ASC\" else stored[:n_messages]\n stored = [Message.from_lc_message(m) for m in stored]\n if sender_type:\n expected_type = MESSAGE_SENDER_AI if sender_type == MESSAGE_SENDER_AI else MESSAGE_SENDER_USER\n stored = [m for m in stored if m.type == expected_type]\n else:\n # For internal memory, we always fetch the last N messages by ordering by DESC\n stored = await aget_messages(\n sender=sender_type,\n sender_name=sender_name,\n session_id=session_id,\n limit=10000,\n order=order,\n )\n if n_messages:\n stored = stored[-n_messages:] if order == \"ASC\" else stored[:n_messages]\n\n # self.status = stored\n return cast(Data, stored)\n\n async def retrieve_messages_as_text(self) -> Message:\n stored_text = data_to_text(self.template, await self.retrieve_messages())\n # self.status = stored_text\n return Message(text=stored_text)\n\n async def retrieve_messages_dataframe(self) -> DataFrame:\n \"\"\"Convert the retrieved messages into a DataFrame.\n\n Returns:\n DataFrame: A DataFrame containing the message data.\n \"\"\"\n messages = await self.retrieve_messages()\n return DataFrame(messages)\n\n def update_build_config(\n self,\n build_config: dotdict,\n field_value: Any, # noqa: ARG002\n field_name: str | None = None, # noqa: ARG002\n ) -> dotdict:\n return set_current_fields(\n build_config=build_config,\n action_fields=self.mode_config,\n selected_action=build_config[\"mode\"][\"value\"],\n default_fields=self.default_keys,\n func=set_field_display,\n )\n"
+ "value": "from typing import Any, cast\n\nfrom langflow.custom.custom_component.component import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs.inputs import DropdownInput, HandleInput, IntInput, MessageTextInput, MultilineInput, TabInput\nfrom langflow.memory import aget_messages, astore_message\nfrom langflow.schema.data import Data\nfrom langflow.schema.dataframe import DataFrame\nfrom langflow.schema.dotdict import dotdict\nfrom langflow.schema.message import Message\nfrom langflow.template.field.base import Output\nfrom langflow.utils.component_utils import set_current_fields, set_field_display\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass MemoryComponent(Component):\n display_name = \"Message History\"\n description = \"Stores or retrieves stored chat messages from Langflow tables or an external memory.\"\n documentation: str = \"https://docs.langflow.org/components-helpers#message-history\"\n icon = \"message-square-more\"\n name = \"Memory\"\n default_keys = [\"mode\", \"memory\"]\n mode_config = {\n \"Store\": [\"message\", \"memory\", \"sender\", \"sender_name\", \"session_id\"],\n \"Retrieve\": [\"n_messages\", \"order\", \"template\", \"memory\"],\n }\n\n inputs = [\n TabInput(\n name=\"mode\",\n display_name=\"Mode\",\n options=[\"Retrieve\", \"Store\"],\n value=\"Retrieve\",\n info=\"Operation mode: Store messages or Retrieve messages.\",\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"message\",\n display_name=\"Message\",\n info=\"The chat message to be stored.\",\n tool_mode=True,\n dynamic=True,\n show=False,\n ),\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"Memory\"],\n info=\"Retrieve messages from an external memory. If empty, it will use the Langflow tables.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"sender_type\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, \"Machine and User\"],\n value=\"Machine and User\",\n info=\"Filter by sender type.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender\",\n display_name=\"Sender\",\n info=\"The sender of the message. Might be Machine or User. \"\n \"If empty, the current sender parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Filter by sender name.\",\n advanced=True,\n show=False,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Messages\",\n value=100,\n info=\"Number of messages to retrieve.\",\n advanced=True,\n show=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n value=\"\",\n advanced=True,\n ),\n DropdownInput(\n name=\"order\",\n display_name=\"Order\",\n options=[\"Ascending\", \"Descending\"],\n value=\"Ascending\",\n info=\"Order of the messages.\",\n advanced=True,\n tool_mode=True,\n required=True,\n ),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
\"\n \"It can contain the keys {text}, {sender} or any other key in the message data.\",\n value=\"{sender_name}: {text}\",\n advanced=True,\n show=False,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Message\", name=\"messages_text\", method=\"retrieve_messages_as_text\", dynamic=True),\n Output(display_name=\"Dataframe\", name=\"dataframe\", method=\"retrieve_messages_dataframe\", dynamic=True),\n ]\n\n def update_outputs(self, frontend_node: dict, field_name: str, field_value: Any) -> dict:\n \"\"\"Dynamically show only the relevant output based on the selected output type.\"\"\"\n if field_name == \"mode\":\n # Start with empty outputs\n frontend_node[\"outputs\"] = []\n if field_value == \"Store\":\n frontend_node[\"outputs\"] = [\n Output(\n display_name=\"Stored Messages\",\n name=\"stored_messages\",\n method=\"store_message\",\n hidden=True,\n dynamic=True,\n )\n ]\n if field_value == \"Retrieve\":\n frontend_node[\"outputs\"] = [\n Output(\n display_name=\"Messages\", name=\"messages_text\", method=\"retrieve_messages_as_text\", dynamic=True\n ),\n Output(\n display_name=\"Dataframe\", name=\"dataframe\", method=\"retrieve_messages_dataframe\", dynamic=True\n ),\n ]\n return frontend_node\n\n async def store_message(self) -> Message:\n message = Message(text=self.message) if isinstance(self.message, str) else self.message\n\n message.session_id = self.session_id or message.session_id\n message.sender = self.sender or message.sender or MESSAGE_SENDER_AI\n message.sender_name = self.sender_name or message.sender_name or MESSAGE_SENDER_NAME_AI\n\n stored_messages: list[Message] = []\n\n if self.memory:\n self.memory.session_id = message.session_id\n lc_message = message.to_lc_message()\n await self.memory.aadd_messages([lc_message])\n\n stored_messages = await self.memory.aget_messages() or []\n\n stored_messages = [Message.from_lc_message(m) for m in stored_messages] if stored_messages else []\n\n if message.sender:\n stored_messages = [m for m in stored_messages if m.sender == message.sender]\n else:\n await astore_message(message, flow_id=self.graph.flow_id)\n stored_messages = (\n await aget_messages(\n session_id=message.session_id, sender_name=message.sender_name, sender=message.sender\n )\n or []\n )\n\n if not stored_messages:\n msg = \"No messages were stored. 
Please ensure that the session ID and sender are properly set.\"\n raise ValueError(msg)\n\n stored_message = stored_messages[0]\n self.status = stored_message\n return stored_message\n\n async def retrieve_messages(self) -> Data:\n sender_type = self.sender_type\n sender_name = self.sender_name\n session_id = self.session_id\n n_messages = self.n_messages\n order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n if sender_type == \"Machine and User\":\n sender_type = None\n\n if self.memory and not hasattr(self.memory, \"aget_messages\"):\n memory_name = type(self.memory).__name__\n err_msg = f\"External Memory object ({memory_name}) must have 'aget_messages' method.\"\n raise AttributeError(err_msg)\n # Check if n_messages is None or 0\n if n_messages == 0:\n stored = []\n elif self.memory:\n # override session_id\n self.memory.session_id = session_id\n\n stored = await self.memory.aget_messages()\n # langchain memories are supposed to return messages in ascending order\n\n if order == \"DESC\":\n stored = stored[::-1]\n if n_messages:\n stored = stored[-n_messages:] if order == \"ASC\" else stored[:n_messages]\n stored = [Message.from_lc_message(m) for m in stored]\n if sender_type:\n expected_type = MESSAGE_SENDER_AI if sender_type == MESSAGE_SENDER_AI else MESSAGE_SENDER_USER\n stored = [m for m in stored if m.type == expected_type]\n else:\n # For internal memory, we always fetch the last N messages by ordering by DESC\n stored = await aget_messages(\n sender=sender_type,\n sender_name=sender_name,\n session_id=session_id,\n limit=10000,\n order=order,\n )\n if n_messages:\n stored = stored[-n_messages:] if order == \"ASC\" else stored[:n_messages]\n\n # self.status = stored\n return cast(\"Data\", stored)\n\n async def retrieve_messages_as_text(self) -> Message:\n stored_text = data_to_text(self.template, await self.retrieve_messages())\n # self.status = stored_text\n return Message(text=stored_text)\n\n async def retrieve_messages_dataframe(self) -> DataFrame:\n \"\"\"Convert the retrieved messages into a DataFrame.\n\n Returns:\n DataFrame: A DataFrame containing the message data.\n \"\"\"\n messages = await self.retrieve_messages()\n return DataFrame(messages)\n\n def update_build_config(\n self,\n build_config: dotdict,\n field_value: Any, # noqa: ARG002\n field_name: str | None = None, # noqa: ARG002\n ) -> dotdict:\n return set_current_fields(\n build_config=build_config,\n action_fields=self.mode_config,\n selected_action=build_config[\"mode\"][\"value\"],\n default_fields=self.default_keys,\n func=set_field_display,\n )\n"
  },
  "memory": {
  "_input_type": "HandleInput",
@@ -1926,6 +1935,15 @@
  "lf_version": "1.4.3",
  "metadata": {
  "code_hash": "192913db3453",
+ "dependencies": {
+ "dependencies": [
+ {
+ "name": "langflow",
+ "version": null
+ }
+ ],
+ "total_dependencies": 1
+ },
  "module": "langflow.components.input_output.chat.ChatInput"
  },
  "minimized": true,
@@ -2243,6 +2261,23 @@
  "legacy": false,
  "metadata": {
  "code_hash": "6f74e04e39d5",
+ "dependencies": {
+ "dependencies": [
+ {
+ "name": "orjson",
+ "version": "3.10.15"
+ },
+ {
+ "name": "fastapi",
+ "version": "0.115.13"
+ },
+ {
+ "name": "langflow",
+ "version": null
+ }
+ ],
+ "total_dependencies": 3
+ },
  "module": "langflow.components.input_output.chat_output.ChatOutput"
  },
  "minimized": true,