langflow-base-nightly 0.5.1.dev3__py3-none-any.whl → 0.5.1.dev4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (935)
  1. langflow/__init__.py +215 -0
  2. langflow/__main__.py +16 -2
  3. langflow/alembic/versions/006b3990db50_add_unique_constraints.py +4 -7
  4. langflow/alembic/versions/012fb73ac359_add_folder_table.py +4 -5
  5. langflow/alembic/versions/0ae3a2674f32_update_the_columns_that_need_to_change_.py +11 -20
  6. langflow/alembic/versions/0b8757876a7c_.py +4 -7
  7. langflow/alembic/versions/0d60fcbd4e8e_create_vertex_builds_table.py +4 -6
  8. langflow/alembic/versions/1a110b568907_replace_credential_table_with_variable.py +4 -5
  9. langflow/alembic/versions/1b8b740a6fa3_remove_fk_constraint_in_message_.py +32 -27
  10. langflow/alembic/versions/1c79524817ed_add_unique_constraints_per_user_in_.py +4 -5
  11. langflow/alembic/versions/1d90f8a0efe1_update_description_columns_type.py +4 -5
  12. langflow/alembic/versions/1eab2c3eb45e_event_error.py +14 -15
  13. langflow/alembic/versions/1ef9c4f3765d_.py +5 -10
  14. langflow/alembic/versions/1f4d6df60295_add_default_fields_column.py +4 -5
  15. langflow/alembic/versions/260dbcc8b680_adds_tables.py +4 -5
  16. langflow/alembic/versions/29fe8f1f806b_add_missing_index.py +4 -5
  17. langflow/alembic/versions/2ac71eb9c3ae_adds_credential_table.py +4 -7
  18. langflow/alembic/versions/3bb0ddf32dfb_add_unique_constraints_per_user_in_flow_.py +4 -5
  19. langflow/alembic/versions/4e5980a44eaa_fix_date_times_again.py +1 -2
  20. langflow/alembic/versions/58b28437a398_modify_nullable.py +1 -2
  21. langflow/alembic/versions/5ace73a7f223_new_remove_table_upgrade_op.py +6 -12
  22. langflow/alembic/versions/631faacf5da2_add_webhook_columns.py +4 -5
  23. langflow/alembic/versions/63b9c451fd30_add_icon_and_icon_bg_color_to_flow.py +4 -5
  24. langflow/alembic/versions/66f72f04a1de_add_mcp_support_with_project_settings_.py +21 -23
  25. langflow/alembic/versions/67cc006d50bf_add_profile_image_column.py +4 -5
  26. langflow/alembic/versions/6e7b581b5648_fix_nullable.py +4 -5
  27. langflow/alembic/versions/7843803a87b5_store_updates.py +4 -6
  28. langflow/alembic/versions/79e675cb6752_change_datetime_type.py +1 -2
  29. langflow/alembic/versions/7d2162acc8b2_adds_updated_at_and_folder_cols.py +4 -10
  30. langflow/alembic/versions/90be8e2ed91e_create_transactions_table.py +4 -6
  31. langflow/alembic/versions/93e2705fa8d6_add_column_save_path_to_flow.py +7 -9
  32. langflow/alembic/versions/a72f5cf9c2f9_add_endpoint_name_col.py +4 -5
  33. langflow/alembic/versions/b2fa308044b5_add_unique_constraints.py +1 -2
  34. langflow/alembic/versions/bc2f01c40e4a_new_fixes.py +4 -5
  35. langflow/alembic/versions/c153816fd85f_set_name_and_value_to_not_nullable.py +4 -5
  36. langflow/alembic/versions/d066bfd22890_add_message_table.py +4 -4
  37. langflow/alembic/versions/d2d475a1f7c0_add_tags_column_to_flow.py +12 -13
  38. langflow/alembic/versions/d3dbf656a499_add_gradient_column_in_flow.py +12 -12
  39. langflow/alembic/versions/d9a6ea21edcd_rename_default_folder.py +7 -10
  40. langflow/alembic/versions/dd9e0804ebd1_add_v2_file_table.py +8 -7
  41. langflow/alembic/versions/e3162c1804e6_add_persistent_locked_state.py +10 -10
  42. langflow/alembic/versions/e3bc869fa272_fix_nullable.py +4 -5
  43. langflow/alembic/versions/e56d87f8994a_add_optins_column_to_user.py +13 -14
  44. langflow/alembic/versions/e5a65ecff2cd_nullable_in_vertex_build.py +4 -5
  45. langflow/alembic/versions/eb5866d51fd2_change_columns_to_be_nullable.py +4 -5
  46. langflow/alembic/versions/eb5e72293a8e_add_error_and_edit_flags_to_message.py +4 -5
  47. langflow/alembic/versions/f3b2d1f1002d_add_column_access_type_to_flow.py +19 -15
  48. langflow/alembic/versions/f5ee9749d1a6_user_id_can_be_null_in_flow.py +4 -6
  49. langflow/alembic/versions/fd531f8868b1_fix_credential_table.py +5 -8
  50. langflow/api/build.py +5 -4
  51. langflow/api/health_check_router.py +1 -1
  52. langflow/api/limited_background_tasks.py +1 -1
  53. langflow/api/log_router.py +1 -2
  54. langflow/api/utils.py +2 -2
  55. langflow/api/v1/base.py +1 -2
  56. langflow/api/v1/callback.py +4 -9
  57. langflow/api/v1/chat.py +6 -7
  58. langflow/api/v1/endpoints.py +15 -15
  59. langflow/api/v1/files.py +1 -1
  60. langflow/api/v1/flows.py +1 -1
  61. langflow/api/v1/knowledge_bases.py +1 -1
  62. langflow/api/v1/mcp.py +1 -1
  63. langflow/api/v1/mcp_projects.py +14 -5
  64. langflow/api/v1/mcp_utils.py +3 -3
  65. langflow/api/v1/openai_responses.py +4 -4
  66. langflow/api/v1/schemas.py +3 -38
  67. langflow/api/v1/starter_projects.py +61 -3
  68. langflow/api/v1/store.py +1 -1
  69. langflow/api/v1/validate.py +3 -3
  70. langflow/api/v1/voice_mode.py +2 -2
  71. langflow/api/v2/files.py +1 -1
  72. langflow/api/v2/mcp.py +2 -2
  73. langflow/base/__init__.py +11 -0
  74. langflow/base/agents/__init__.py +3 -0
  75. langflow/base/data/__init__.py +2 -4
  76. langflow/base/data/utils.py +2 -197
  77. langflow/base/embeddings/__init__.py +3 -0
  78. langflow/base/io/__init__.py +7 -0
  79. langflow/base/io/chat.py +5 -18
  80. langflow/base/io/text.py +2 -21
  81. langflow/base/knowledge_bases/__init__.py +3 -0
  82. langflow/base/memory/__init__.py +3 -0
  83. langflow/base/models/__init__.py +2 -2
  84. langflow/base/models/openai_constants.py +6 -120
  85. langflow/base/prompts/__init__.py +3 -0
  86. langflow/base/prompts/api_utils.py +2 -223
  87. langflow/base/textsplitters/__init__.py +3 -0
  88. langflow/base/tools/__init__.py +3 -0
  89. langflow/base/vectorstores/__init__.py +3 -0
  90. langflow/components/__init__.py +7 -259
  91. langflow/components/agents.py +6 -0
  92. langflow/components/anthropic.py +6 -0
  93. langflow/components/data.py +6 -0
  94. langflow/components/helpers.py +6 -0
  95. langflow/components/knowledge_bases/ingestion.py +13 -14
  96. langflow/components/knowledge_bases/retrieval.py +8 -7
  97. langflow/components/openai.py +6 -0
  98. langflow/components/processing/__init__.py +1 -117
  99. langflow/components/processing/converter.py +3 -149
  100. langflow/custom/__init__.py +26 -3
  101. langflow/custom/custom_component/__init__.py +4 -0
  102. langflow/custom/custom_component/component.py +20 -1738
  103. langflow/custom/custom_component/component_with_cache.py +1 -8
  104. langflow/custom/custom_component/custom_component.py +1 -552
  105. langflow/custom/utils.py +1 -872
  106. langflow/custom/validate.py +1 -0
  107. langflow/events/event_manager.py +18 -108
  108. langflow/field_typing/__init__.py +6 -6
  109. langflow/field_typing/constants.py +87 -122
  110. langflow/field_typing/range_spec.py +2 -32
  111. langflow/frontend/assets/{SlackIcon-Cc7Qnzki.js → SlackIcon-v88osOTA.js} +1 -1
  112. langflow/frontend/assets/{Wikipedia-7ulMZY46.js → Wikipedia-DD_S2k00.js} +1 -1
  113. langflow/frontend/assets/{Wolfram-By9PGsHS.js → Wolfram-EO2C5noN.js} +1 -1
  114. langflow/frontend/assets/{index-DVLIDc2_.js → index-1Gv1mfvk.js} +1 -1
  115. langflow/frontend/assets/{index-MVW4HTEk.js → index-7v-bzlzf.js} +1 -1
  116. langflow/frontend/assets/{index-CUzlcce2.js → index-9CbMazbV.js} +1 -1
  117. langflow/frontend/assets/{index-CU16NJD7.js → index-B8ZHP8g2.js} +1 -1
  118. langflow/frontend/assets/{index-v8eXbWlM.js → index-B8y2e6vN.js} +1 -1
  119. langflow/frontend/assets/{index-BX_asvRB.js → index-BBRUGsyr.js} +1 -1
  120. langflow/frontend/assets/{index-9FL5xjkL.js → index-BGwqQwlh.js} +1 -1
  121. langflow/frontend/assets/{index-BAn-AzCS.js → index-BIq-k-FG.js} +1 -1
  122. langflow/frontend/assets/{index-D5c2nNvp.js → index-BSN73YP8.js} +1 -1
  123. langflow/frontend/assets/{index-DMCerPJM.js → index-BU8R8jRn.js} +1 -1
  124. langflow/frontend/assets/{index-CvSoff-8.js → index-BV6yx8ey.js} +1 -1
  125. langflow/frontend/assets/{index-BISPW-f6.js → index-BYIsg-Eh.js} +1 -1
  126. langflow/frontend/assets/{index-GzOGB_fo.js → index-B_ksDBSQ.js} +1 -1
  127. langflow/frontend/assets/{index-BIqEYjNT.js → index-Ba1UOZ9A.js} +1 -1
  128. langflow/frontend/assets/{index-ByxGmq5p.js → index-Ba9tKRQg.js} +1 -1
  129. langflow/frontend/assets/{index-BLEWsL1U.js → index-Bbfaw8ca.js} +1 -1
  130. langflow/frontend/assets/{index-C_MhBX6R.js → index-BbuGqvAx.js} +1 -1
  131. langflow/frontend/assets/{index-RH_I78z_.js → index-BeoXu1YX.js} +1 -1
  132. langflow/frontend/assets/{index-cYFKmtmg.js → index-BfjZmOnH.js} +1 -1
  133. langflow/frontend/assets/{index-Bm9i8F4W.js → index-Bjzy_HZB.js} +1 -1
  134. langflow/frontend/assets/{index-_szO7sta.js → index-BofEkpYB.js} +1 -1
  135. langflow/frontend/assets/{index-DP1oE6QB.js → index-Bp7Mty2H.js} +1 -1
  136. langflow/frontend/assets/{index-CeswGUz3.js → index-BqX1H6yK.js} +1 -1
  137. langflow/frontend/assets/{index-C8pI0lzi.js → index-BqtBAJAN.js} +1 -1
  138. langflow/frontend/assets/{index-BusCv3bR.js → index-Bsfraj7A.js} +1 -1
  139. langflow/frontend/assets/{index-BWnKMRFJ.js → index-BtFl7fER.js} +1 -1
  140. langflow/frontend/assets/{index-DnlVWWU8.js → index-BvX993Sv.js} +1 -1
  141. langflow/frontend/assets/{index-C676MS3I.js → index-BvgQ2vzM.js} +1 -1
  142. langflow/frontend/assets/{index-DJ6HD14g.js → index-BwY98u8n.js} +1 -1
  143. langflow/frontend/assets/{index-C51yNvIL.js → index-C-RIJAOS.js} +1 -1
  144. langflow/frontend/assets/{index-DiblXWmk.js → index-C1K6A38P.js} +1 -1
  145. langflow/frontend/assets/{index-Co__gFM1.js → index-C3Vwhx0t.js} +1 -1
  146. langflow/frontend/assets/{index-Coi86oqP.js → index-C5XUG_gr.js} +1 -1
  147. langflow/frontend/assets/{index-jwzN3Jd_.js → index-C6ouLG9o.js} +1 -1
  148. langflow/frontend/assets/{index-CQQ-4XMS.js → index-C7ZJ_Z6f.js} +1 -1
  149. langflow/frontend/assets/{index-Bl7RpmrB.js → index-CCOGIwGY.js} +1 -1
  150. langflow/frontend/assets/{index-CVkIdc6y.js → index-CCcye2rt.js} +1 -1
  151. langflow/frontend/assets/{index-bMhyLtgS.js → index-CFR4yJQB.js} +1 -1
  152. langflow/frontend/assets/{index-aAgSKWb3.js → index-CIGmPP0H.js} +1 -1
  153. langflow/frontend/assets/{index-BGt6jQ4x.js → index-CJmMEa6d.js} +1 -1
  154. langflow/frontend/assets/{index-DX7JcSMz.js → index-CJxD7lyU.js} +1 -1
  155. langflow/frontend/assets/{index-BZ-A4K98.js → index-CL_vu6ut.js} +1 -1
  156. langflow/frontend/assets/{index-BMpKFGhI.js → index-COf3UnBn.js} +1 -1
  157. langflow/frontend/assets/{index-xN8ogFdo.js → index-CV9650h_.js} +1 -1
  158. langflow/frontend/assets/{index-OsUvqIUr.js → index-CVDzych0.js} +1 -1
  159. langflow/frontend/assets/{index-BH7AyHxp.js → index-CWIHsC4D.js} +1 -1
  160. langflow/frontend/assets/{index-mjwtJmkP.js → index-CXCnFZ0L.js} +1 -1
  161. langflow/frontend/assets/{index-3jlSQi5Y.js → index-Ca_Pw_Dn.js} +1 -1
  162. langflow/frontend/assets/{index-D-SnFlhU.js → index-Cbb3bX9e.js} +1 -1
  163. langflow/frontend/assets/{index--e0oQqZh.js → index-CcJtOz-Z.js} +1 -1
  164. langflow/frontend/assets/{index-S-sc0Cm9.js → index-CfTbTHEv.js} +1 -1
  165. langflow/frontend/assets/{index-Deu8rlaZ.js → index-ChoxDAgX.js} +1 -1
  166. langflow/frontend/assets/{index-lnF9Eqr2.js → index-Cn4gw8aE.js} +1 -1
  167. langflow/frontend/assets/{index-C_NwzK6j.js → index-CnpLg4zX.js} +1 -1
  168. langflow/frontend/assets/{index-DznH7Jbq.js → index-Cpao2omG.js} +1 -1
  169. langflow/frontend/assets/{index-DpWrk8mA.js → index-CqoxM01j.js} +1 -1
  170. langflow/frontend/assets/{index-Bw-TIIC6.js → index-CrHf2Ic1.js} +1 -1
  171. langflow/frontend/assets/{index-DmYLDQag.js → index-CrV0uIjp.js} +1 -1
  172. langflow/frontend/assets/{index-Dp7ZQyL3.js → index-CssADaak.js} +1 -1
  173. langflow/frontend/assets/{index-CNh0rwur.js → index-CtJdNLy9.js} +1 -1
  174. langflow/frontend/assets/{index-Ca1b7Iag.js → index-CyeWD2dh.js} +1 -1
  175. langflow/frontend/assets/{index-DcApTyZ7.js → index-D1xzD7uc.js} +1 -1
  176. langflow/frontend/assets/{index-B3GvPjhD.js → index-D6MuXC4L.js} +1 -1
  177. langflow/frontend/assets/{index-Cw0UComa.js → index-D8w9zvIF.js} +1 -1
  178. langflow/frontend/assets/{index-C-2MRYoJ.js → index-D98Gn0A6.js} +1 -1
  179. langflow/frontend/assets/{index-aWnZIwHd.js → index-DBhjpWkf.js} +1 -1
  180. langflow/frontend/assets/{index-nw3WF9lY.js → index-DCCRJzcY.js} +1 -1
  181. langflow/frontend/assets/{index-RjeC0kaX.js → index-DCTRSkEW.js} +1 -1
  182. langflow/frontend/assets/{index-B_kBTgxV.js → index-DCUfitVj.js} +1 -1
  183. langflow/frontend/assets/{index-ChsGhZn3.js → index-DDdz-Xcl.js} +1 -1
  184. langflow/frontend/assets/{index-7yAHPRxv.js → index-DGdMwZjG.js} +1 -1
  185. langflow/frontend/assets/{index-DjQElpEg.js → index-DGtl2vMw.js} +1 -1
  186. langflow/frontend/assets/{index-BCXhKCOK.js → index-DHVdkrni.js} +1 -1
  187. langflow/frontend/assets/{index-S8uJXTOq.js → index-DJBWwjgl.js} +1 -1
  188. langflow/frontend/assets/{index-qiVTWUuf.js → index-DMAkJ_qX.js} +1 -1
  189. langflow/frontend/assets/{index-D-WStJI6.js → index-DMEvEQI5.js} +1 -1
  190. langflow/frontend/assets/{index-BhqVw9WQ.js → index-DNGRoOsp.js} +1 -1
  191. langflow/frontend/assets/{index-Cu7vC48Y.js → index-DNT_TUTa.js} +1 -1
  192. langflow/frontend/assets/{index-Bhcv5M0n.js → index-DQKOH_9K.js} +1 -1
  193. langflow/frontend/assets/{index-CLcaktde.js → index-DQhqqtqQ.js} +1 -1
  194. langflow/frontend/assets/{index-DZVgPCio.js → index-DRM7KKnG.js} +1 -1
  195. langflow/frontend/assets/{index-uybez8MR.js → index-DSCtl3a5.js} +1 -1
  196. langflow/frontend/assets/{index-CJ5A6STv.js → index-DSLNlm0Z.js} +1 -1
  197. langflow/frontend/assets/{index-Drg8me2a.js → index-DT-PspE-.js} +1 -1
  198. langflow/frontend/assets/{index-DsEZjOcp.js → index-DTpbH-p8.js} +1 -1
  199. langflow/frontend/assets/{index-DrXXKzpD.js → index-DWV6MsIq.js} +1 -1
  200. langflow/frontend/assets/{index-4JIEdyIM.js → index-DWeL4US_.js} +1 -1
  201. langflow/frontend/assets/{index-BlDsBQ_1.js → index-DYKZHhpU.js} +1 -1
  202. langflow/frontend/assets/{index-DFY8YFbC.js → index-DZyQHiMR.js} +1 -1
  203. langflow/frontend/assets/{index-CKPZpkQk.js → index-Dc6qVuSa.js} +1 -1
  204. langflow/frontend/assets/{index-yyAaYjLR.js → index-DkYuicnC.js} +1 -1
  205. langflow/frontend/assets/{index-DmVt5Jlx.js → index-Dlj_2mMs.js} +1 -1
  206. langflow/frontend/assets/{index-BvRIG6P5.js → index-DmGJUrEp.js} +1 -1
  207. langflow/frontend/assets/{index-BWFIrwW1.js → index-Dn6hpCAZ.js} +1 -1
  208. langflow/frontend/assets/{index-Cb5G9Ifd.js → index-DrJU8Fgb.js} +1 -1
  209. langflow/frontend/assets/{index-COoTCxvs.js → index-DsWfdCzp.js} +1 -1
  210. langflow/frontend/assets/{index-ZjeocHyu.js → index-DvCPWs2_.js} +1 -1
  211. langflow/frontend/assets/{index-B5LHnuQR.js → index-DvPVq7OP.js} +1 -1
  212. langflow/frontend/assets/{index-BnCnYnao.js → index-Dw71ufW4.js} +1 -1
  213. langflow/frontend/assets/{index-AALDfCyt.js → index-DxkJactf.js} +1 -1
  214. langflow/frontend/assets/{index-k9jP5chN.js → index-Dz2GTphU.js} +1 -1
  215. langflow/frontend/assets/{index-BdjfHsrf.js → index-Fvd524_c.js} +1 -1
  216. langflow/frontend/assets/{index-AKVkmT4S.js → index-GAQ0Mk2M.js} +1 -1
  217. langflow/frontend/assets/{index-BZSa2qz7.js → index-Hm5-4ItD.js} +1 -1
  218. langflow/frontend/assets/{index-DbfS_UH-.js → index-IT67FzsK.js} +1 -1
  219. langflow/frontend/assets/{index-BLXN681C.js → index-ItYiij1i.js} +1 -1
  220. langflow/frontend/assets/{index-CiklyQU3.js → index-IuR_FEdB.js} +1 -1
  221. langflow/frontend/assets/{index-xV6ystWy.js → index-Jj60FQkv.js} +1 -1
  222. langflow/frontend/assets/{index-C_157Mb-.js → index-LlvshmVz.js} +1 -1
  223. langflow/frontend/assets/{index-CDphUsa3.js → index-LwKh3I_W.js} +1 -1
  224. langflow/frontend/assets/{index-BrDz-PxE.js → index-N-xxmKKH.js} +1 -1
  225. langflow/frontend/assets/{index-BsdLyYMY.js → index-RwpaHIAH.js} +1 -1
  226. langflow/frontend/assets/{index-Cu2Xr6_j.js → index-TVvsp-xh.js} +1 -1
  227. langflow/frontend/assets/{index-CPiM2oyj.js → index-TdE2u9zP.js} +1 -1
  228. langflow/frontend/assets/{index-DOj_QWqG.js → index-_x-NkYeW.js} +1 -1
  229. langflow/frontend/assets/{index-YJsAl7vm.js → index-a-YclEbW.js} +1 -1
  230. langflow/frontend/assets/{index-5-CSw2-z.js → index-e9MFKUCo.js} +1 -1
  231. langflow/frontend/assets/{index-BSwBVwyF.js → index-krPr8f2F.js} +1 -1
  232. langflow/frontend/assets/{index-Df6psZEj.js → index-kveiUWuL.js} +1 -1
  233. langflow/frontend/assets/{index-CF4_Og1m.js → index-lE3oSjJi.js} +1 -1
  234. langflow/frontend/assets/{index-C6nzdeYx.js → index-lM3UYg7F.js} +1 -1
  235. langflow/frontend/assets/{index-C-wnbBBY.js → index-nsRk3qgA.js} +1 -1
  236. langflow/frontend/assets/{index-D234yKNJ.js → index-pBO0SZLD.js} +4 -4
  237. langflow/frontend/assets/{index-BMvp94tO.js → index-pbZHsbuE.js} +1 -1
  238. langflow/frontend/assets/{index-hg2y9OAt.js → index-sfX3aWyp.js} +1 -1
  239. langflow/frontend/assets/{index-DTCrijba.js → index-xQz-VJ0-.js} +1 -1
  240. langflow/frontend/assets/{index-SB4rw8D5.js → index-yfcsaHS6.js} +1 -1
  241. langflow/frontend/assets/{index-C-bjC2sz.js → index-zcGjo9fx.js} +1 -1
  242. langflow/frontend/assets/lazyIconImports-BjqDmNYG.js +2 -0
  243. langflow/frontend/assets/{use-post-add-user-JUeLDErC.js → use-post-add-user-w3vpKSOB.js} +1 -1
  244. langflow/frontend/index.html +1 -1
  245. langflow/graph/__init__.py +4 -4
  246. langflow/helpers/data.py +2 -2
  247. langflow/helpers/flow.py +9 -7
  248. langflow/helpers/user.py +2 -2
  249. langflow/initial_setup/setup.py +9 -9
  250. langflow/initial_setup/starter_projects/Basic Prompt Chaining.json +119 -41
  251. langflow/initial_setup/starter_projects/Basic Prompting.json +45 -19
  252. langflow/initial_setup/starter_projects/Blog Writer.json +53 -21
  253. langflow/initial_setup/starter_projects/Custom Component Generator.json +121 -97
  254. langflow/initial_setup/starter_projects/Document Q&A.json +46 -18
  255. langflow/initial_setup/starter_projects/Financial Report Parser.json +49 -17
  256. langflow/initial_setup/starter_projects/Hybrid Search RAG.json +89 -50
  257. langflow/initial_setup/starter_projects/Image Sentiment Analysis.json +86 -22
  258. langflow/initial_setup/starter_projects/Instagram Copywriter.json +210 -57
  259. langflow/initial_setup/starter_projects/Invoice Summarizer.json +132 -35
  260. langflow/initial_setup/starter_projects/Knowledge Ingestion.json +8 -8
  261. langflow/initial_setup/starter_projects/Knowledge Retrieval.json +8 -8
  262. langflow/initial_setup/starter_projects/Market Research.json +174 -48
  263. langflow/initial_setup/starter_projects/Meeting Summary.json +102 -38
  264. langflow/initial_setup/starter_projects/Memory Chatbot.json +49 -21
  265. langflow/initial_setup/starter_projects/News Aggregator.json +140 -39
  266. langflow/initial_setup/starter_projects/Nvidia Remix.json +153 -181
  267. langflow/initial_setup/starter_projects/Pokédex Agent.json +132 -35
  268. langflow/initial_setup/starter_projects/Portfolio Website Code Generator.json +106 -43
  269. langflow/initial_setup/starter_projects/Price Deal Finder.json +136 -39
  270. langflow/initial_setup/starter_projects/Research Agent.json +206 -53
  271. langflow/initial_setup/starter_projects/Research Translation Loop.json +66 -34
  272. langflow/initial_setup/starter_projects/SEO Keyword Generator.json +41 -15
  273. langflow/initial_setup/starter_projects/SaaS Pricing.json +128 -31
  274. langflow/initial_setup/starter_projects/Search agent.json +132 -35
  275. langflow/initial_setup/starter_projects/Sequential Tasks Agents.json +422 -98
  276. langflow/initial_setup/starter_projects/Simple Agent.json +150 -42
  277. langflow/initial_setup/starter_projects/Social Media Agent.json +150 -42
  278. langflow/initial_setup/starter_projects/Text Sentiment Analysis.json +120 -24
  279. langflow/initial_setup/starter_projects/Travel Planning Agents.json +418 -94
  280. langflow/initial_setup/starter_projects/Twitter Thread Generator.json +69 -37
  281. langflow/initial_setup/starter_projects/Vector Store RAG.json +66 -38
  282. langflow/initial_setup/starter_projects/Youtube Analysis.json +191 -51
  283. langflow/initial_setup/starter_projects/basic_prompting.py +4 -4
  284. langflow/initial_setup/starter_projects/blog_writer.py +5 -5
  285. langflow/initial_setup/starter_projects/complex_agent.py +8 -8
  286. langflow/initial_setup/starter_projects/document_qa.py +5 -5
  287. langflow/initial_setup/starter_projects/hierarchical_tasks_agent.py +8 -8
  288. langflow/initial_setup/starter_projects/memory_chatbot.py +6 -6
  289. langflow/initial_setup/starter_projects/sequential_tasks_agent.py +7 -7
  290. langflow/initial_setup/starter_projects/vector_store_rag.py +8 -8
  291. langflow/inputs/__init__.py +3 -2
  292. langflow/inputs/constants.py +3 -2
  293. langflow/inputs/input_mixin.py +49 -310
  294. langflow/inputs/inputs.py +72 -703
  295. langflow/inputs/validators.py +2 -18
  296. langflow/interface/__init__.py +4 -0
  297. langflow/interface/components.py +3 -491
  298. langflow/interface/initialize/loading.py +7 -6
  299. langflow/interface/listing.py +3 -25
  300. langflow/interface/run.py +1 -1
  301. langflow/interface/utils.py +3 -111
  302. langflow/io/__init__.py +2 -2
  303. langflow/io/schema.py +11 -302
  304. langflow/load/__init__.py +4 -2
  305. langflow/load/utils.py +2 -96
  306. langflow/logging/__init__.py +2 -1
  307. langflow/logging/setup.py +1 -1
  308. langflow/main.py +8 -5
  309. langflow/memory.py +12 -6
  310. langflow/middleware.py +1 -1
  311. langflow/processing/process.py +7 -7
  312. langflow/schema/__init__.py +22 -5
  313. langflow/schema/artifact.py +1 -1
  314. langflow/schema/data.py +5 -303
  315. langflow/schema/dataframe.py +2 -205
  316. langflow/schema/graph.py +4 -45
  317. langflow/schema/image.py +2 -67
  318. langflow/schema/message.py +6 -470
  319. langflow/schema/playground_events.py +5 -6
  320. langflow/schema/schema.py +24 -117
  321. langflow/serialization/constants.py +3 -2
  322. langflow/serialization/serialization.py +1 -1
  323. langflow/server.py +1 -2
  324. langflow/services/__init__.py +1 -2
  325. langflow/services/auth/mcp_encryption.py +1 -1
  326. langflow/services/auth/service.py +1 -1
  327. langflow/services/auth/utils.py +5 -5
  328. langflow/services/cache/disk.py +2 -2
  329. langflow/services/cache/factory.py +2 -2
  330. langflow/services/cache/service.py +2 -2
  331. langflow/services/cache/utils.py +0 -11
  332. langflow/services/database/factory.py +1 -1
  333. langflow/services/database/models/flow/model.py +1 -1
  334. langflow/services/database/models/message/crud.py +2 -1
  335. langflow/services/database/models/transactions/crud.py +1 -1
  336. langflow/services/database/models/user/crud.py +1 -1
  337. langflow/services/database/service.py +2 -2
  338. langflow/services/database/utils.py +1 -2
  339. langflow/services/deps.py +12 -17
  340. langflow/services/enhanced_manager.py +71 -0
  341. langflow/services/factory.py +14 -7
  342. langflow/services/flow/flow_runner.py +4 -4
  343. langflow/services/job_queue/service.py +2 -1
  344. langflow/services/manager.py +14 -130
  345. langflow/services/schema.py +0 -1
  346. langflow/services/session/service.py +3 -2
  347. langflow/services/settings/__init__.py +0 -3
  348. langflow/services/settings/base.py +16 -549
  349. langflow/services/settings/factory.py +2 -21
  350. langflow/services/settings/feature_flags.py +2 -11
  351. langflow/services/settings/service.py +2 -31
  352. langflow/services/shared_component_cache/factory.py +1 -1
  353. langflow/services/socket/service.py +1 -1
  354. langflow/services/socket/utils.py +1 -8
  355. langflow/services/state/factory.py +1 -1
  356. langflow/services/state/service.py +3 -2
  357. langflow/services/storage/factory.py +2 -2
  358. langflow/services/storage/local.py +1 -2
  359. langflow/services/storage/s3.py +1 -2
  360. langflow/services/storage/service.py +2 -1
  361. langflow/services/store/factory.py +1 -1
  362. langflow/services/store/service.py +2 -2
  363. langflow/services/store/utils.py +1 -2
  364. langflow/services/task/service.py +2 -1
  365. langflow/services/task/temp_flow_cleanup.py +1 -1
  366. langflow/services/telemetry/factory.py +1 -1
  367. langflow/services/telemetry/service.py +2 -3
  368. langflow/services/tracing/arize_phoenix.py +3 -3
  369. langflow/services/tracing/base.py +1 -1
  370. langflow/services/tracing/factory.py +1 -1
  371. langflow/services/tracing/langfuse.py +2 -2
  372. langflow/services/tracing/langsmith.py +2 -2
  373. langflow/services/tracing/langwatch.py +4 -4
  374. langflow/services/tracing/opik.py +2 -2
  375. langflow/services/tracing/service.py +17 -11
  376. langflow/services/tracing/traceloop.py +2 -2
  377. langflow/services/tracing/utils.py +1 -1
  378. langflow/services/utils.py +54 -9
  379. langflow/services/variable/factory.py +1 -1
  380. langflow/services/variable/kubernetes.py +2 -3
  381. langflow/services/variable/kubernetes_secrets.py +1 -2
  382. langflow/services/variable/service.py +2 -3
  383. langflow/template/__init__.py +2 -9
  384. langflow/template/field/__init__.py +3 -0
  385. langflow/template/field/base.py +2 -256
  386. langflow/template/frontend_node.py +3 -0
  387. langflow/template/utils.py +2 -216
  388. langflow/utils/constants.py +28 -204
  389. langflow/utils/lazy_load.py +3 -14
  390. langflow/utils/schemas.py +2 -3
  391. langflow/utils/template_validation.py +2 -2
  392. langflow/utils/util.py +59 -479
  393. langflow/utils/validate.py +2 -488
  394. langflow/utils/voice_utils.py +1 -2
  395. langflow/worker.py +1 -1
  396. {langflow_base_nightly-0.5.1.dev3.dist-info → langflow_base_nightly-0.5.1.dev4.dist-info}/METADATA +2 -1
  397. langflow_base_nightly-0.5.1.dev4.dist-info/RECORD +633 -0
  398. langflow/base/agents/agent.py +0 -267
  399. langflow/base/agents/callback.py +0 -130
  400. langflow/base/agents/context.py +0 -109
  401. langflow/base/agents/crewai/__init__.py +0 -0
  402. langflow/base/agents/crewai/crew.py +0 -231
  403. langflow/base/agents/crewai/tasks.py +0 -12
  404. langflow/base/agents/default_prompts.py +0 -23
  405. langflow/base/agents/errors.py +0 -15
  406. langflow/base/agents/events.py +0 -346
  407. langflow/base/agents/utils.py +0 -205
  408. langflow/base/astra_assistants/__init__.py +0 -0
  409. langflow/base/astra_assistants/util.py +0 -171
  410. langflow/base/chains/__init__.py +0 -0
  411. langflow/base/chains/model.py +0 -19
  412. langflow/base/composio/__init__.py +0 -0
  413. langflow/base/composio/composio_base.py +0 -1297
  414. langflow/base/compressors/__init__.py +0 -0
  415. langflow/base/compressors/model.py +0 -60
  416. langflow/base/constants.py +0 -46
  417. langflow/base/curl/__init__.py +0 -0
  418. langflow/base/curl/parse.py +0 -188
  419. langflow/base/data/base_file.py +0 -685
  420. langflow/base/data/docling_utils.py +0 -245
  421. langflow/base/document_transformers/__init__.py +0 -0
  422. langflow/base/document_transformers/model.py +0 -43
  423. langflow/base/embeddings/aiml_embeddings.py +0 -62
  424. langflow/base/embeddings/model.py +0 -26
  425. langflow/base/flow_processing/__init__.py +0 -0
  426. langflow/base/flow_processing/utils.py +0 -86
  427. langflow/base/huggingface/__init__.py +0 -0
  428. langflow/base/huggingface/model_bridge.py +0 -133
  429. langflow/base/langchain_utilities/__init__.py +0 -0
  430. langflow/base/langchain_utilities/model.py +0 -35
  431. langflow/base/langchain_utilities/spider_constants.py +0 -1
  432. langflow/base/langwatch/__init__.py +0 -0
  433. langflow/base/langwatch/utils.py +0 -18
  434. langflow/base/mcp/__init__.py +0 -0
  435. langflow/base/mcp/constants.py +0 -2
  436. langflow/base/mcp/util.py +0 -1524
  437. langflow/base/memory/memory.py +0 -49
  438. langflow/base/memory/model.py +0 -38
  439. langflow/base/models/aiml_constants.py +0 -51
  440. langflow/base/models/anthropic_constants.py +0 -47
  441. langflow/base/models/aws_constants.py +0 -151
  442. langflow/base/models/chat_result.py +0 -76
  443. langflow/base/models/google_generative_ai_constants.py +0 -70
  444. langflow/base/models/groq_constants.py +0 -134
  445. langflow/base/models/model.py +0 -375
  446. langflow/base/models/model_input_constants.py +0 -299
  447. langflow/base/models/model_metadata.py +0 -41
  448. langflow/base/models/model_utils.py +0 -8
  449. langflow/base/models/novita_constants.py +0 -35
  450. langflow/base/models/ollama_constants.py +0 -49
  451. langflow/base/models/sambanova_constants.py +0 -18
  452. langflow/base/processing/__init__.py +0 -0
  453. langflow/base/prompts/utils.py +0 -61
  454. langflow/base/textsplitters/model.py +0 -28
  455. langflow/base/tools/base.py +0 -26
  456. langflow/base/tools/component_tool.py +0 -324
  457. langflow/base/tools/constants.py +0 -49
  458. langflow/base/tools/flow_tool.py +0 -131
  459. langflow/base/tools/run_flow.py +0 -227
  460. langflow/base/vectorstores/model.py +0 -193
  461. langflow/base/vectorstores/utils.py +0 -22
  462. langflow/base/vectorstores/vector_store_connection_decorator.py +0 -52
  463. langflow/components/FAISS/__init__.py +0 -34
  464. langflow/components/FAISS/faiss.py +0 -111
  465. langflow/components/Notion/__init__.py +0 -19
  466. langflow/components/Notion/add_content_to_page.py +0 -269
  467. langflow/components/Notion/create_page.py +0 -94
  468. langflow/components/Notion/list_database_properties.py +0 -68
  469. langflow/components/Notion/list_pages.py +0 -122
  470. langflow/components/Notion/list_users.py +0 -77
  471. langflow/components/Notion/page_content_viewer.py +0 -93
  472. langflow/components/Notion/search.py +0 -111
  473. langflow/components/Notion/update_page_property.py +0 -114
  474. langflow/components/_importing.py +0 -37
  475. langflow/components/agentql/__init__.py +0 -3
  476. langflow/components/agentql/agentql_api.py +0 -151
  477. langflow/components/agents/__init__.py +0 -4
  478. langflow/components/agents/agent.py +0 -554
  479. langflow/components/agents/mcp_component.py +0 -501
  480. langflow/components/aiml/__init__.py +0 -37
  481. langflow/components/aiml/aiml.py +0 -112
  482. langflow/components/aiml/aiml_embeddings.py +0 -37
  483. langflow/components/amazon/__init__.py +0 -36
  484. langflow/components/amazon/amazon_bedrock_embedding.py +0 -109
  485. langflow/components/amazon/amazon_bedrock_model.py +0 -124
  486. langflow/components/amazon/s3_bucket_uploader.py +0 -211
  487. langflow/components/anthropic/__init__.py +0 -34
  488. langflow/components/anthropic/anthropic.py +0 -187
  489. langflow/components/apify/__init__.py +0 -5
  490. langflow/components/apify/apify_actor.py +0 -325
  491. langflow/components/arxiv/__init__.py +0 -3
  492. langflow/components/arxiv/arxiv.py +0 -163
  493. langflow/components/assemblyai/__init__.py +0 -46
  494. langflow/components/assemblyai/assemblyai_get_subtitles.py +0 -83
  495. langflow/components/assemblyai/assemblyai_lemur.py +0 -183
  496. langflow/components/assemblyai/assemblyai_list_transcripts.py +0 -95
  497. langflow/components/assemblyai/assemblyai_poll_transcript.py +0 -72
  498. langflow/components/assemblyai/assemblyai_start_transcript.py +0 -188
  499. langflow/components/azure/__init__.py +0 -37
  500. langflow/components/azure/azure_openai.py +0 -95
  501. langflow/components/azure/azure_openai_embeddings.py +0 -83
  502. langflow/components/baidu/__init__.py +0 -32
  503. langflow/components/baidu/baidu_qianfan_chat.py +0 -113
  504. langflow/components/bing/__init__.py +0 -3
  505. langflow/components/bing/bing_search_api.py +0 -61
  506. langflow/components/cassandra/__init__.py +0 -40
  507. langflow/components/cassandra/cassandra.py +0 -264
  508. langflow/components/cassandra/cassandra_chat.py +0 -92
  509. langflow/components/cassandra/cassandra_graph.py +0 -238
  510. langflow/components/chains/__init__.py +0 -0
  511. langflow/components/chroma/__init__.py +0 -34
  512. langflow/components/chroma/chroma.py +0 -167
  513. langflow/components/cleanlab/__init__.py +0 -40
  514. langflow/components/cleanlab/cleanlab_evaluator.py +0 -157
  515. langflow/components/cleanlab/cleanlab_rag_evaluator.py +0 -254
  516. langflow/components/cleanlab/cleanlab_remediator.py +0 -131
  517. langflow/components/clickhouse/__init__.py +0 -34
  518. langflow/components/clickhouse/clickhouse.py +0 -135
  519. langflow/components/cloudflare/__init__.py +0 -32
  520. langflow/components/cloudflare/cloudflare.py +0 -81
  521. langflow/components/cohere/__init__.py +0 -40
  522. langflow/components/cohere/cohere_embeddings.py +0 -81
  523. langflow/components/cohere/cohere_models.py +0 -46
  524. langflow/components/cohere/cohere_rerank.py +0 -51
  525. langflow/components/composio/__init__.py +0 -73
  526. langflow/components/composio/composio_api.py +0 -268
  527. langflow/components/composio/dropbox_compnent.py +0 -11
  528. langflow/components/composio/github_composio.py +0 -11
  529. langflow/components/composio/gmail_composio.py +0 -38
  530. langflow/components/composio/googlecalendar_composio.py +0 -11
  531. langflow/components/composio/googlemeet_composio.py +0 -11
  532. langflow/components/composio/googletasks_composio.py +0 -8
  533. langflow/components/composio/linear_composio.py +0 -11
  534. langflow/components/composio/outlook_composio.py +0 -11
  535. langflow/components/composio/reddit_composio.py +0 -11
  536. langflow/components/composio/slack_composio.py +0 -11
  537. langflow/components/composio/slackbot_composio.py +0 -11
  538. langflow/components/composio/supabase_composio.py +0 -11
  539. langflow/components/composio/todoist_composio.py +0 -11
  540. langflow/components/composio/youtube_composio.py +0 -11
  541. langflow/components/confluence/__init__.py +0 -3
  542. langflow/components/confluence/confluence.py +0 -84
  543. langflow/components/couchbase/__init__.py +0 -34
  544. langflow/components/couchbase/couchbase.py +0 -102
  545. langflow/components/crewai/__init__.py +0 -49
  546. langflow/components/crewai/crewai.py +0 -107
  547. langflow/components/crewai/hierarchical_crew.py +0 -46
  548. langflow/components/crewai/hierarchical_task.py +0 -44
  549. langflow/components/crewai/sequential_crew.py +0 -52
  550. langflow/components/crewai/sequential_task.py +0 -73
  551. langflow/components/crewai/sequential_task_agent.py +0 -143
  552. langflow/components/custom_component/__init__.py +0 -34
  553. langflow/components/custom_component/custom_component.py +0 -31
  554. langflow/components/data/__init__.py +0 -25
  555. langflow/components/data/api_request.py +0 -545
  556. langflow/components/data/csv_to_data.py +0 -95
  557. langflow/components/data/directory.py +0 -113
  558. langflow/components/data/file.py +0 -586
  559. langflow/components/data/json_to_data.py +0 -98
  560. langflow/components/data/news_search.py +0 -164
  561. langflow/components/data/rss.py +0 -69
  562. langflow/components/data/sql_executor.py +0 -99
  563. langflow/components/data/url.py +0 -299
  564. langflow/components/data/web_search.py +0 -112
  565. langflow/components/data/webhook.py +0 -56
  566. langflow/components/datastax/__init__.py +0 -70
  567. langflow/components/datastax/astra_assistant_manager.py +0 -306
  568. langflow/components/datastax/astra_db.py +0 -69
  569. langflow/components/datastax/astra_vectorize.py +0 -124
  570. langflow/components/datastax/astradb_cql.py +0 -314
  571. langflow/components/datastax/astradb_graph.py +0 -319
  572. langflow/components/datastax/astradb_tool.py +0 -414
  573. langflow/components/datastax/astradb_vectorstore.py +0 -1285
  574. langflow/components/datastax/create_assistant.py +0 -58
  575. langflow/components/datastax/create_thread.py +0 -32
  576. langflow/components/datastax/dotenv.py +0 -35
  577. langflow/components/datastax/get_assistant.py +0 -37
  578. langflow/components/datastax/getenvvar.py +0 -30
  579. langflow/components/datastax/graph_rag.py +0 -141
  580. langflow/components/datastax/hcd.py +0 -314
  581. langflow/components/datastax/list_assistants.py +0 -25
  582. langflow/components/datastax/run.py +0 -89
  583. langflow/components/deactivated/__init__.py +0 -19
  584. langflow/components/deactivated/amazon_kendra.py +0 -66
  585. langflow/components/deactivated/chat_litellm_model.py +0 -158
  586. langflow/components/deactivated/code_block_extractor.py +0 -26
  587. langflow/components/deactivated/documents_to_data.py +0 -22
  588. langflow/components/deactivated/embed.py +0 -16
  589. langflow/components/deactivated/extract_key_from_data.py +0 -46
  590. langflow/components/deactivated/json_document_builder.py +0 -59
  591. langflow/components/deactivated/list_flows.py +0 -20
  592. langflow/components/deactivated/mcp_sse.py +0 -61
  593. langflow/components/deactivated/mcp_stdio.py +0 -62
  594. langflow/components/deactivated/merge_data.py +0 -93
  595. langflow/components/deactivated/message.py +0 -37
  596. langflow/components/deactivated/metal.py +0 -54
  597. langflow/components/deactivated/multi_query.py +0 -59
  598. langflow/components/deactivated/retriever.py +0 -43
  599. langflow/components/deactivated/selective_passthrough.py +0 -77
  600. langflow/components/deactivated/should_run_next.py +0 -40
  601. langflow/components/deactivated/split_text.py +0 -63
  602. langflow/components/deactivated/store_message.py +0 -24
  603. langflow/components/deactivated/sub_flow.py +0 -124
  604. langflow/components/deactivated/vectara_self_query.py +0 -76
  605. langflow/components/deactivated/vector_store.py +0 -24
  606. langflow/components/deepseek/__init__.py +0 -34
  607. langflow/components/deepseek/deepseek.py +0 -136
  608. langflow/components/docling/__init__.py +0 -43
  609. langflow/components/docling/chunk_docling_document.py +0 -186
  610. langflow/components/docling/docling_inline.py +0 -235
  611. langflow/components/docling/docling_remote.py +0 -193
  612. langflow/components/docling/export_docling_document.py +0 -117
  613. langflow/components/documentloaders/__init__.py +0 -0
  614. langflow/components/duckduckgo/__init__.py +0 -3
  615. langflow/components/duckduckgo/duck_duck_go_search_run.py +0 -92
  616. langflow/components/elastic/__init__.py +0 -37
  617. langflow/components/elastic/elasticsearch.py +0 -267
  618. langflow/components/elastic/opensearch.py +0 -243
  619. langflow/components/embeddings/__init__.py +0 -37
  620. langflow/components/embeddings/similarity.py +0 -76
  621. langflow/components/embeddings/text_embedder.py +0 -64
  622. langflow/components/exa/__init__.py +0 -3
  623. langflow/components/exa/exa_search.py +0 -68
  624. langflow/components/firecrawl/__init__.py +0 -43
  625. langflow/components/firecrawl/firecrawl_crawl_api.py +0 -88
  626. langflow/components/firecrawl/firecrawl_extract_api.py +0 -136
  627. langflow/components/firecrawl/firecrawl_map_api.py +0 -89
  628. langflow/components/firecrawl/firecrawl_scrape_api.py +0 -73
  629. langflow/components/git/__init__.py +0 -4
  630. langflow/components/git/git.py +0 -262
  631. langflow/components/git/gitextractor.py +0 -196
  632. langflow/components/glean/__init__.py +0 -3
  633. langflow/components/glean/glean_search_api.py +0 -173
  634. langflow/components/google/__init__.py +0 -17
  635. langflow/components/google/gmail.py +0 -192
  636. langflow/components/google/google_bq_sql_executor.py +0 -157
  637. langflow/components/google/google_drive.py +0 -92
  638. langflow/components/google/google_drive_search.py +0 -152
  639. langflow/components/google/google_generative_ai.py +0 -147
  640. langflow/components/google/google_generative_ai_embeddings.py +0 -141
  641. langflow/components/google/google_oauth_token.py +0 -89
  642. langflow/components/google/google_search_api_core.py +0 -68
  643. langflow/components/google/google_serper_api_core.py +0 -74
  644. langflow/components/groq/__init__.py +0 -34
  645. langflow/components/groq/groq.py +0 -140
  646. langflow/components/helpers/__init__.py +0 -52
  647. langflow/components/helpers/calculator_core.py +0 -89
  648. langflow/components/helpers/create_list.py +0 -40
  649. langflow/components/helpers/current_date.py +0 -42
  650. langflow/components/helpers/id_generator.py +0 -42
  651. langflow/components/helpers/memory.py +0 -251
  652. langflow/components/helpers/output_parser.py +0 -45
  653. langflow/components/helpers/store_message.py +0 -90
  654. langflow/components/homeassistant/__init__.py +0 -7
  655. langflow/components/homeassistant/home_assistant_control.py +0 -152
  656. langflow/components/homeassistant/list_home_assistant_states.py +0 -137
  657. langflow/components/huggingface/__init__.py +0 -37
  658. langflow/components/huggingface/huggingface.py +0 -197
  659. langflow/components/huggingface/huggingface_inference_api.py +0 -106
  660. langflow/components/ibm/__init__.py +0 -34
  661. langflow/components/ibm/watsonx.py +0 -203
  662. langflow/components/ibm/watsonx_embeddings.py +0 -135
  663. langflow/components/icosacomputing/__init__.py +0 -5
  664. langflow/components/icosacomputing/combinatorial_reasoner.py +0 -84
  665. langflow/components/input_output/__init__.py +0 -38
  666. langflow/components/input_output/chat.py +0 -120
  667. langflow/components/input_output/chat_output.py +0 -200
  668. langflow/components/input_output/text.py +0 -27
  669. langflow/components/input_output/text_output.py +0 -29
  670. langflow/components/jigsawstack/__init__.py +0 -23
  671. langflow/components/jigsawstack/ai_scrape.py +0 -126
  672. langflow/components/jigsawstack/ai_web_search.py +0 -136
  673. langflow/components/jigsawstack/file_read.py +0 -115
  674. langflow/components/jigsawstack/file_upload.py +0 -94
  675. langflow/components/jigsawstack/image_generation.py +0 -205
  676. langflow/components/jigsawstack/nsfw.py +0 -60
  677. langflow/components/jigsawstack/object_detection.py +0 -124
  678. langflow/components/jigsawstack/sentiment.py +0 -112
  679. langflow/components/jigsawstack/text_to_sql.py +0 -90
  680. langflow/components/jigsawstack/text_translate.py +0 -77
  681. langflow/components/jigsawstack/vocr.py +0 -107
  682. langflow/components/langchain_utilities/__init__.py +0 -109
  683. langflow/components/langchain_utilities/character.py +0 -53
  684. langflow/components/langchain_utilities/conversation.py +0 -52
  685. langflow/components/langchain_utilities/csv_agent.py +0 -107
  686. langflow/components/langchain_utilities/fake_embeddings.py +0 -26
  687. langflow/components/langchain_utilities/html_link_extractor.py +0 -35
  688. langflow/components/langchain_utilities/json_agent.py +0 -45
  689. langflow/components/langchain_utilities/langchain_hub.py +0 -126
  690. langflow/components/langchain_utilities/language_recursive.py +0 -49
  691. langflow/components/langchain_utilities/language_semantic.py +0 -138
  692. langflow/components/langchain_utilities/llm_checker.py +0 -39
  693. langflow/components/langchain_utilities/llm_math.py +0 -42
  694. langflow/components/langchain_utilities/natural_language.py +0 -61
  695. langflow/components/langchain_utilities/openai_tools.py +0 -53
  696. langflow/components/langchain_utilities/openapi.py +0 -48
  697. langflow/components/langchain_utilities/recursive_character.py +0 -60
  698. langflow/components/langchain_utilities/retrieval_qa.py +0 -83
  699. langflow/components/langchain_utilities/runnable_executor.py +0 -137
  700. langflow/components/langchain_utilities/self_query.py +0 -80
  701. langflow/components/langchain_utilities/spider.py +0 -142
  702. langflow/components/langchain_utilities/sql.py +0 -40
  703. langflow/components/langchain_utilities/sql_database.py +0 -35
  704. langflow/components/langchain_utilities/sql_generator.py +0 -78
  705. langflow/components/langchain_utilities/tool_calling.py +0 -59
  706. langflow/components/langchain_utilities/vector_store_info.py +0 -49
  707. langflow/components/langchain_utilities/vector_store_router.py +0 -33
  708. langflow/components/langchain_utilities/xml_agent.py +0 -71
  709. langflow/components/langwatch/__init__.py +0 -3
  710. langflow/components/langwatch/langwatch.py +0 -278
  711. langflow/components/link_extractors/__init__.py +0 -0
  712. langflow/components/lmstudio/__init__.py +0 -34
  713. langflow/components/lmstudio/lmstudioembeddings.py +0 -89
  714. langflow/components/lmstudio/lmstudiomodel.py +0 -129
  715. langflow/components/logic/__init__.py +0 -52
  716. langflow/components/logic/conditional_router.py +0 -171
  717. langflow/components/logic/data_conditional_router.py +0 -125
  718. langflow/components/logic/flow_tool.py +0 -110
  719. langflow/components/logic/listen.py +0 -29
  720. langflow/components/logic/loop.py +0 -125
  721. langflow/components/logic/notify.py +0 -88
  722. langflow/components/logic/pass_message.py +0 -35
  723. langflow/components/logic/run_flow.py +0 -71
  724. langflow/components/logic/sub_flow.py +0 -114
  725. langflow/components/maritalk/__init__.py +0 -32
  726. langflow/components/maritalk/maritalk.py +0 -52
  727. langflow/components/mem0/__init__.py +0 -3
  728. langflow/components/mem0/mem0_chat_memory.py +0 -136
  729. langflow/components/milvus/__init__.py +0 -34
  730. langflow/components/milvus/milvus.py +0 -115
  731. langflow/components/mistral/__init__.py +0 -37
  732. langflow/components/mistral/mistral.py +0 -114
  733. langflow/components/mistral/mistral_embeddings.py +0 -58
  734. langflow/components/models/__init__.py +0 -34
  735. langflow/components/models/embedding_model.py +0 -114
  736. langflow/components/models/language_model.py +0 -144
  737. langflow/components/mongodb/__init__.py +0 -34
  738. langflow/components/mongodb/mongodb_atlas.py +0 -213
  739. langflow/components/needle/__init__.py +0 -3
  740. langflow/components/needle/needle.py +0 -104
  741. langflow/components/notdiamond/__init__.py +0 -36
  742. langflow/components/notdiamond/notdiamond.py +0 -228
  743. langflow/components/novita/__init__.py +0 -32
  744. langflow/components/novita/novita.py +0 -130
  745. langflow/components/nvidia/__init__.py +0 -57
  746. langflow/components/nvidia/nvidia.py +0 -157
  747. langflow/components/nvidia/nvidia_embedding.py +0 -77
  748. langflow/components/nvidia/nvidia_ingest.py +0 -317
  749. langflow/components/nvidia/nvidia_rerank.py +0 -63
  750. langflow/components/nvidia/system_assist.py +0 -65
  751. langflow/components/olivya/__init__.py +0 -3
  752. langflow/components/olivya/olivya.py +0 -116
  753. langflow/components/ollama/__init__.py +0 -37
  754. langflow/components/ollama/ollama.py +0 -330
  755. langflow/components/ollama/ollama_embeddings.py +0 -106
  756. langflow/components/openai/__init__.py +0 -37
  757. langflow/components/openai/openai.py +0 -100
  758. langflow/components/openai/openai_chat_model.py +0 -158
  759. langflow/components/openrouter/__init__.py +0 -32
  760. langflow/components/openrouter/openrouter.py +0 -202
  761. langflow/components/output_parsers/__init__.py +0 -0
  762. langflow/components/perplexity/__init__.py +0 -34
  763. langflow/components/perplexity/perplexity.py +0 -75
  764. langflow/components/pgvector/__init__.py +0 -34
  765. langflow/components/pgvector/pgvector.py +0 -72
  766. langflow/components/pinecone/__init__.py +0 -34
  767. langflow/components/pinecone/pinecone.py +0 -134
  768. langflow/components/processing/alter_metadata.py +0 -108
  769. langflow/components/processing/batch_run.py +0 -205
  770. langflow/components/processing/combine_text.py +0 -39
  771. langflow/components/processing/create_data.py +0 -110
  772. langflow/components/processing/data_operations.py +0 -438
  773. langflow/components/processing/data_to_dataframe.py +0 -70
  774. langflow/components/processing/dataframe_operations.py +0 -321
  775. langflow/components/processing/extract_key.py +0 -53
  776. langflow/components/processing/filter_data.py +0 -42
  777. langflow/components/processing/filter_data_values.py +0 -88
  778. langflow/components/processing/json_cleaner.py +0 -103
  779. langflow/components/processing/lambda_filter.py +0 -154
  780. langflow/components/processing/llm_router.py +0 -499
  781. langflow/components/processing/merge_data.py +0 -90
  782. langflow/components/processing/message_to_data.py +0 -36
  783. langflow/components/processing/parse_data.py +0 -70
  784. langflow/components/processing/parse_dataframe.py +0 -68
  785. langflow/components/processing/parse_json_data.py +0 -90
  786. langflow/components/processing/parser.py +0 -143
  787. langflow/components/processing/prompt.py +0 -67
  788. langflow/components/processing/python_repl_core.py +0 -98
  789. langflow/components/processing/regex.py +0 -82
  790. langflow/components/processing/save_file.py +0 -208
  791. langflow/components/processing/select_data.py +0 -48
  792. langflow/components/processing/split_text.py +0 -141
  793. langflow/components/processing/structured_output.py +0 -202
  794. langflow/components/processing/update_data.py +0 -160
  795. langflow/components/prototypes/__init__.py +0 -34
  796. langflow/components/prototypes/python_function.py +0 -73
  797. langflow/components/qdrant/__init__.py +0 -34
  798. langflow/components/qdrant/qdrant.py +0 -109
  799. langflow/components/redis/__init__.py +0 -37
  800. langflow/components/redis/redis.py +0 -89
  801. langflow/components/redis/redis_chat.py +0 -43
  802. langflow/components/sambanova/__init__.py +0 -32
  803. langflow/components/sambanova/sambanova.py +0 -84
  804. langflow/components/scrapegraph/__init__.py +0 -40
  805. langflow/components/scrapegraph/scrapegraph_markdownify_api.py +0 -64
  806. langflow/components/scrapegraph/scrapegraph_search_api.py +0 -64
  807. langflow/components/scrapegraph/scrapegraph_smart_scraper_api.py +0 -71
  808. langflow/components/searchapi/__init__.py +0 -36
  809. langflow/components/searchapi/search.py +0 -79
  810. langflow/components/serpapi/__init__.py +0 -3
  811. langflow/components/serpapi/serp.py +0 -115
  812. langflow/components/serper/__init__.py +0 -3
  813. langflow/components/serper/google_serper_api_core.py +0 -74
  814. langflow/components/supabase/__init__.py +0 -37
  815. langflow/components/supabase/supabase.py +0 -76
  816. langflow/components/tavily/__init__.py +0 -4
  817. langflow/components/tavily/tavily_extract.py +0 -117
  818. langflow/components/tavily/tavily_search.py +0 -212
  819. langflow/components/textsplitters/__init__.py +0 -0
  820. langflow/components/toolkits/__init__.py +0 -0
  821. langflow/components/tools/__init__.py +0 -72
  822. langflow/components/tools/calculator.py +0 -103
  823. langflow/components/tools/google_search_api.py +0 -45
  824. langflow/components/tools/google_serper_api.py +0 -115
  825. langflow/components/tools/python_code_structured_tool.py +0 -327
  826. langflow/components/tools/python_repl.py +0 -97
  827. langflow/components/tools/search_api.py +0 -87
  828. langflow/components/tools/searxng.py +0 -145
  829. langflow/components/tools/serp_api.py +0 -119
  830. langflow/components/tools/tavily_search_tool.py +0 -344
  831. langflow/components/tools/wikidata_api.py +0 -102
  832. langflow/components/tools/wikipedia_api.py +0 -49
  833. langflow/components/tools/yahoo_finance.py +0 -124
  834. langflow/components/twelvelabs/__init__.py +0 -52
  835. langflow/components/twelvelabs/convert_astra_results.py +0 -84
  836. langflow/components/twelvelabs/pegasus_index.py +0 -311
  837. langflow/components/twelvelabs/split_video.py +0 -291
  838. langflow/components/twelvelabs/text_embeddings.py +0 -57
  839. langflow/components/twelvelabs/twelvelabs_pegasus.py +0 -408
  840. langflow/components/twelvelabs/video_embeddings.py +0 -100
  841. langflow/components/twelvelabs/video_file.py +0 -179
  842. langflow/components/unstructured/__init__.py +0 -3
  843. langflow/components/unstructured/unstructured.py +0 -121
  844. langflow/components/upstash/__init__.py +0 -34
  845. langflow/components/upstash/upstash.py +0 -124
  846. langflow/components/vectara/__init__.py +0 -37
  847. langflow/components/vectara/vectara.py +0 -97
  848. langflow/components/vectara/vectara_rag.py +0 -164
  849. langflow/components/vectorstores/__init__.py +0 -34
  850. langflow/components/vectorstores/local_db.py +0 -261
  851. langflow/components/vertexai/__init__.py +0 -37
  852. langflow/components/vertexai/vertexai.py +0 -71
  853. langflow/components/vertexai/vertexai_embeddings.py +0 -67
  854. langflow/components/weaviate/__init__.py +0 -34
  855. langflow/components/weaviate/weaviate.py +0 -89
  856. langflow/components/wikipedia/__init__.py +0 -4
  857. langflow/components/wikipedia/wikidata.py +0 -86
  858. langflow/components/wikipedia/wikipedia.py +0 -53
  859. langflow/components/wolframalpha/__init__.py +0 -3
  860. langflow/components/wolframalpha/wolfram_alpha_api.py +0 -54
  861. langflow/components/xai/__init__.py +0 -32
  862. langflow/components/xai/xai.py +0 -167
  863. langflow/components/yahoosearch/__init__.py +0 -3
  864. langflow/components/yahoosearch/yahoo.py +0 -137
  865. langflow/components/youtube/__init__.py +0 -52
  866. langflow/components/youtube/channel.py +0 -227
  867. langflow/components/youtube/comments.py +0 -231
  868. langflow/components/youtube/playlist.py +0 -33
  869. langflow/components/youtube/search.py +0 -120
  870. langflow/components/youtube/trending.py +0 -285
  871. langflow/components/youtube/video_details.py +0 -263
  872. langflow/components/youtube/youtube_transcripts.py +0 -118
  873. langflow/components/zep/__init__.py +0 -3
  874. langflow/components/zep/zep.py +0 -44
  875. langflow/custom/attributes.py +0 -86
  876. langflow/custom/code_parser/__init__.py +0 -3
  877. langflow/custom/code_parser/code_parser.py +0 -361
  878. langflow/custom/custom_component/base_component.py +0 -118
  879. langflow/custom/dependency_analyzer.py +0 -165
  880. langflow/custom/directory_reader/__init__.py +0 -3
  881. langflow/custom/directory_reader/directory_reader.py +0 -359
  882. langflow/custom/directory_reader/utils.py +0 -171
  883. langflow/custom/eval.py +0 -12
  884. langflow/custom/schema.py +0 -32
  885. langflow/custom/tree_visitor.py +0 -21
  886. langflow/frontend/assets/lazyIconImports-Ci-S9xBA.js +0 -2
  887. langflow/graph/edge/__init__.py +0 -0
  888. langflow/graph/edge/base.py +0 -277
  889. langflow/graph/edge/schema.py +0 -119
  890. langflow/graph/edge/utils.py +0 -0
  891. langflow/graph/graph/__init__.py +0 -0
  892. langflow/graph/graph/ascii.py +0 -202
  893. langflow/graph/graph/base.py +0 -2185
  894. langflow/graph/graph/constants.py +0 -58
  895. langflow/graph/graph/runnable_vertices_manager.py +0 -133
  896. langflow/graph/graph/schema.py +0 -53
  897. langflow/graph/graph/state_model.py +0 -66
  898. langflow/graph/graph/utils.py +0 -1024
  899. langflow/graph/schema.py +0 -75
  900. langflow/graph/state/__init__.py +0 -0
  901. langflow/graph/state/model.py +0 -237
  902. langflow/graph/utils.py +0 -229
  903. langflow/graph/vertex/__init__.py +0 -0
  904. langflow/graph/vertex/base.py +0 -811
  905. langflow/graph/vertex/constants.py +0 -0
  906. langflow/graph/vertex/exceptions.py +0 -4
  907. langflow/graph/vertex/param_handler.py +0 -255
  908. langflow/graph/vertex/schema.py +0 -26
  909. langflow/graph/vertex/utils.py +0 -19
  910. langflow/graph/vertex/vertex_types.py +0 -489
  911. langflow/legacy_custom/__init__.py +0 -0
  912. langflow/legacy_custom/customs.py +0 -16
  913. langflow/load/load.py +0 -250
  914. langflow/logging/logger.py +0 -369
  915. langflow/processing/utils.py +0 -25
  916. langflow/schema/openai_responses_schemas.py +0 -74
  917. langflow/schema/serialize.py +0 -13
  918. langflow/services/chat/config.py +0 -2
  919. langflow/services/settings/auth.py +0 -130
  920. langflow/services/settings/constants.py +0 -31
  921. langflow/services/settings/manager.py +0 -49
  922. langflow/services/settings/utils.py +0 -40
  923. langflow/template/field/prompt.py +0 -2
  924. langflow/template/frontend_node/__init__.py +0 -6
  925. langflow/template/frontend_node/base.py +0 -212
  926. langflow/template/frontend_node/constants.py +0 -65
  927. langflow/template/frontend_node/custom_components.py +0 -97
  928. langflow/template/template/__init__.py +0 -0
  929. langflow/template/template/base.py +0 -99
  930. langflow/utils/async_helpers.py +0 -42
  931. langflow/utils/concurrency.py +0 -60
  932. langflow/utils/util_strings.py +0 -56
  933. langflow_base_nightly-0.5.1.dev3.dist-info/RECORD +0 -1159
  934. {langflow_base_nightly-0.5.1.dev3.dist-info → langflow_base_nightly-0.5.1.dev4.dist-info}/WHEEL +0 -0
  935. {langflow_base_nightly-0.5.1.dev3.dist-info → langflow_base_nightly-0.5.1.dev4.dist-info}/entry_points.txt +0 -0
@@ -305,7 +305,7 @@
  "legacy": false,
  "lf_version": "1.1.5",
  "metadata": {
- "code_hash": "6f74e04e39d5",
+ "code_hash": "9619107fecd1",
  "dependencies": {
  "dependencies": [
  {
@@ -317,13 +317,13 @@
  "version": "0.116.1"
  },
  {
- "name": "langflow",
+ "name": "lfx",
  "version": null
  }
  ],
  "total_dependencies": 3
  },
- "module": "langflow.components.input_output.chat_output.ChatOutput"
+ "module": "lfx.components.input_output.chat_output.ChatOutput"
  },
  "minimized": true,
  "output_types": [],
@@ -426,7 +426,7 @@
  "show": true,
  "title_case": false,
  "type": "code",
- "value": "from collections.abc import Generator\nfrom typing import Any\n\nimport orjson\nfrom fastapi.encoders import jsonable_encoder\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.helpers.data import safe_convert\nfrom langflow.inputs.inputs import BoolInput, DropdownInput, HandleInput, MessageTextInput\nfrom langflow.schema.data import Data\nfrom langflow.schema.dataframe import DataFrame\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.template.field.base import Output\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_AI,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n documentation: str = \"https://docs.langflow.org/components-io#chat-output\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n minimized = True\n\n inputs = [\n HandleInput(\n name=\"input_value\",\n display_name=\"Inputs\",\n info=\"Message to be passed as output.\",\n input_types=[\"Data\", \"DataFrame\", \"Message\"],\n required=True,\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n BoolInput(\n name=\"clean_data\",\n display_name=\"Basic Clean Data\",\n value=True,\n info=\"Whether to clean the data\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Output Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, id_: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if id_:\n source_dict[\"id\"] = id_\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n # Handle case where source is a ChatOpenAI object\n if hasattr(source, \"model_name\"):\n source_dict[\"source\"] = source.model_name\n elif hasattr(source, \"model\"):\n source_dict[\"source\"] = str(source.model)\n else:\n source_dict[\"source\"] = str(source)\n return Source(**source_dict)\n\n async def message_response(self) -> Message:\n # First convert the input to string if needed\n text = self.convert_to_string()\n\n # Get source properties\n source, icon, display_name, source_id = self.get_properties_from_source_component()\n background_color = self.background_color\n text_color = self.text_color\n if self.chat_icon:\n icon = self.chat_icon\n\n # Create or use existing Message object\n if isinstance(self.input_value, Message):\n message = self.input_value\n # Update message properties\n message.text = text\n else:\n message = Message(text=text)\n\n # Set message properties\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(source_id, display_name, source)\n message.properties.icon = icon\n message.properties.background_color = background_color\n message.properties.text_color = text_color\n\n # Store message if needed\n if self.session_id and self.should_store_message:\n stored_message = await self.send_message(message)\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n\n def _serialize_data(self, data: Data) -> str:\n \"\"\"Serialize Data object to JSON string.\"\"\"\n # Convert data.data to JSON-serializable format\n serializable_data = jsonable_encoder(data.data)\n # Serialize with orjson, enabling pretty printing with indentation\n json_bytes = orjson.dumps(serializable_data, option=orjson.OPT_INDENT_2)\n # Convert bytes to string and wrap in Markdown code blocks\n return \"```json\\n\" + json_bytes.decode(\"utf-8\") + \"\\n```\"\n\n def _validate_input(self) -> None:\n \"\"\"Validate the input data and raise ValueError if invalid.\"\"\"\n if self.input_value is None:\n msg = \"Input data cannot be None\"\n raise ValueError(msg)\n if isinstance(self.input_value, list) and not all(\n isinstance(item, Message | Data | DataFrame | str) for item in self.input_value\n ):\n invalid_types = [\n type(item).__name__\n for item in self.input_value\n if not isinstance(item, Message | Data | DataFrame | str)\n ]\n msg = f\"Expected Data or DataFrame or Message or str, 
got {invalid_types}\"\n raise TypeError(msg)\n if not isinstance(\n self.input_value,\n Message | Data | DataFrame | str | list | Generator | type(None),\n ):\n type_name = type(self.input_value).__name__\n msg = f\"Expected Data or DataFrame or Message or str, Generator or None, got {type_name}\"\n raise TypeError(msg)\n\n def convert_to_string(self) -> str | Generator[Any, None, None]:\n \"\"\"Convert input data to string with proper error handling.\"\"\"\n self._validate_input()\n if isinstance(self.input_value, list):\n return \"\\n\".join([safe_convert(item, clean_data=self.clean_data) for item in self.input_value])\n if isinstance(self.input_value, Generator):\n return self.input_value\n return safe_convert(self.input_value)\n"
+ "value": "from collections.abc import Generator\nfrom typing import Any\n\nimport orjson\nfrom fastapi.encoders import jsonable_encoder\n\nfrom lfx.base.io.chat import ChatComponent\nfrom lfx.helpers.data import safe_convert\nfrom lfx.inputs.inputs import BoolInput, DropdownInput, HandleInput, MessageTextInput\nfrom lfx.schema.data import Data\nfrom lfx.schema.dataframe import DataFrame\nfrom lfx.schema.message import Message\nfrom lfx.schema.properties import Source\nfrom lfx.template.field.base import Output\nfrom lfx.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_AI,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n documentation: str = \"https://docs.langflow.org/components-io#chat-output\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n minimized = True\n\n inputs = [\n HandleInput(\n name=\"input_value\",\n display_name=\"Inputs\",\n info=\"Message to be passed as output.\",\n input_types=[\"Data\", \"DataFrame\", \"Message\"],\n required=True,\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n BoolInput(\n name=\"clean_data\",\n display_name=\"Basic Clean Data\",\n value=True,\n info=\"Whether to clean the data\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Output Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, id_: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if id_:\n source_dict[\"id\"] = id_\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n # Handle case where source is a ChatOpenAI object\n if hasattr(source, \"model_name\"):\n source_dict[\"source\"] = source.model_name\n elif hasattr(source, \"model\"):\n source_dict[\"source\"] = str(source.model)\n else:\n source_dict[\"source\"] = str(source)\n return Source(**source_dict)\n\n async def message_response(self) -> Message:\n # First convert the input to string if needed\n text = self.convert_to_string()\n\n # Get source properties\n source, icon, display_name, source_id = self.get_properties_from_source_component()\n background_color = self.background_color\n text_color = self.text_color\n if self.chat_icon:\n icon = self.chat_icon\n\n # Create or use existing Message object\n if isinstance(self.input_value, Message):\n message = self.input_value\n # Update message properties\n message.text = text\n else:\n message = Message(text=text)\n\n # Set message properties\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(source_id, display_name, source)\n message.properties.icon = icon\n message.properties.background_color = background_color\n message.properties.text_color = text_color\n\n # Store message if needed\n if self.session_id and self.should_store_message:\n stored_message = await self.send_message(message)\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n\n def _serialize_data(self, data: Data) -> str:\n \"\"\"Serialize Data object to JSON string.\"\"\"\n # Convert data.data to JSON-serializable format\n serializable_data = jsonable_encoder(data.data)\n # Serialize with orjson, enabling pretty printing with indentation\n json_bytes = orjson.dumps(serializable_data, option=orjson.OPT_INDENT_2)\n # Convert bytes to string and wrap in Markdown code blocks\n return \"```json\\n\" + json_bytes.decode(\"utf-8\") + \"\\n```\"\n\n def _validate_input(self) -> None:\n \"\"\"Validate the input data and raise ValueError if invalid.\"\"\"\n if self.input_value is None:\n msg = \"Input data cannot be None\"\n raise ValueError(msg)\n if isinstance(self.input_value, list) and not all(\n isinstance(item, Message | Data | DataFrame | str) for item in self.input_value\n ):\n invalid_types = [\n type(item).__name__\n for item in self.input_value\n if not isinstance(item, Message | Data | DataFrame | str)\n ]\n msg = f\"Expected Data or DataFrame or Message or str, 
got {invalid_types}\"\n raise TypeError(msg)\n if not isinstance(\n self.input_value,\n Message | Data | DataFrame | str | list | Generator | type(None),\n ):\n type_name = type(self.input_value).__name__\n msg = f\"Expected Data or DataFrame or Message or str, Generator or None, got {type_name}\"\n raise TypeError(msg)\n\n def convert_to_string(self) -> str | Generator[Any, None, None]:\n \"\"\"Convert input data to string with proper error handling.\"\"\"\n self._validate_input()\n if isinstance(self.input_value, list):\n return \"\\n\".join([safe_convert(item, clean_data=self.clean_data) for item in self.input_value])\n if isinstance(self.input_value, Generator):\n return self.input_value\n return safe_convert(self.input_value)\n"
  },
  "data_template": {
  "_input_type": "MessageTextInput",
@@ -686,7 +686,7 @@
  "key": "needle",
  "legacy": false,
  "metadata": {
- "code_hash": "57d868cb067b",
+ "code_hash": "5f6cedaa0217",
  "dependencies": {
  "dependencies": [
  {
@@ -694,13 +694,13 @@
  "version": "0.3.21"
  },
  {
- "name": "langflow",
+ "name": "lfx",
  "version": null
  }
  ],
  "total_dependencies": 2
  },
- "module": "langflow.components.needle.needle.NeedleComponent"
+ "module": "lfx.components.needle.needle.NeedleComponent"
  },
  "minimized": false,
  "output_types": [],
@@ -743,7 +743,7 @@
  "show": true,
  "title_case": false,
  "type": "code",
- "value": "from langchain_community.retrievers.needle import NeedleRetriever\n\nfrom langflow.custom.custom_component.component import Component\nfrom langflow.io import IntInput, MessageTextInput, Output, SecretStrInput\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI\n\n\nclass NeedleComponent(Component):\n display_name = \"Needle Retriever\"\n description = \"A retriever that uses the Needle API to search collections.\"\n documentation = \"https://docs.needle-ai.com\"\n icon = \"Needle\"\n name = \"needle\"\n\n inputs = [\n SecretStrInput(\n name=\"needle_api_key\",\n display_name=\"Needle API Key\",\n info=\"Your Needle API key.\",\n required=True,\n ),\n MessageTextInput(\n name=\"collection_id\",\n display_name=\"Collection ID\",\n info=\"The ID of the Needle collection.\",\n required=True,\n ),\n MessageTextInput(\n name=\"query\",\n display_name=\"User Query\",\n info=\"Enter your question here. In tool mode, you can also specify top_k parameter (min: 20).\",\n required=True,\n tool_mode=True,\n ),\n IntInput(\n name=\"top_k\",\n display_name=\"Top K Results\",\n info=\"Number of search results to return (min: 20).\",\n value=20,\n required=True,\n ),\n ]\n\n outputs = [Output(display_name=\"Result\", name=\"result\", type_=\"Message\", method=\"run\")]\n\n def run(self) -> Message:\n # Extract query and top_k\n query_input = self.query\n actual_query = query_input.get(\"query\", \"\") if isinstance(query_input, dict) else query_input\n\n # Parse top_k from tool input or use default, always enforcing minimum of 20\n try:\n if isinstance(query_input, dict) and \"top_k\" in query_input:\n agent_top_k = query_input.get(\"top_k\")\n # Check if agent_top_k is not None before converting to int\n top_k = max(20, int(agent_top_k)) if agent_top_k is not None else max(20, self.top_k)\n else:\n top_k = max(20, self.top_k)\n except (ValueError, TypeError):\n top_k = max(20, self.top_k)\n\n # Validate required inputs\n if not self.needle_api_key or not self.needle_api_key.strip():\n error_msg = \"The Needle API key cannot be empty.\"\n raise ValueError(error_msg)\n if not self.collection_id or not self.collection_id.strip():\n error_msg = \"The Collection ID cannot be empty.\"\n raise ValueError(error_msg)\n if not actual_query or not actual_query.strip():\n error_msg = \"The query cannot be empty.\"\n raise ValueError(error_msg)\n\n try:\n # Initialize the retriever and get documents\n retriever = NeedleRetriever(\n needle_api_key=self.needle_api_key,\n collection_id=self.collection_id,\n top_k=top_k,\n )\n\n docs = retriever.get_relevant_documents(actual_query)\n\n # Format the response\n if not docs:\n text_content = \"No relevant documents found for the query.\"\n else:\n context = \"\\n\\n\".join([f\"Document {i + 1}:\\n{doc.page_content}\" for i, doc in enumerate(docs)])\n text_content = f\"Question: {actual_query}\\n\\nContext:\\n{context}\"\n\n # Return formatted message\n return Message(\n text=text_content,\n type=\"assistant\",\n sender=MESSAGE_SENDER_AI,\n additional_kwargs={\n \"source_documents\": [{\"page_content\": doc.page_content, \"metadata\": doc.metadata} for doc in docs],\n \"top_k_used\": top_k,\n },\n )\n\n except Exception as e:\n error_msg = f\"Error processing query: {e!s}\"\n raise ValueError(error_msg) from e\n"
+ "value": "from langchain_community.retrievers.needle import NeedleRetriever\n\nfrom lfx.custom.custom_component.component import Component\nfrom lfx.io import IntInput, MessageTextInput, Output, SecretStrInput\nfrom lfx.schema.message import Message\nfrom lfx.utils.constants import MESSAGE_SENDER_AI\n\n\nclass NeedleComponent(Component):\n display_name = \"Needle Retriever\"\n description = \"A retriever that uses the Needle API to search collections.\"\n documentation = \"https://docs.needle-ai.com\"\n icon = \"Needle\"\n name = \"needle\"\n\n inputs = [\n SecretStrInput(\n name=\"needle_api_key\",\n display_name=\"Needle API Key\",\n info=\"Your Needle API key.\",\n required=True,\n ),\n MessageTextInput(\n name=\"collection_id\",\n display_name=\"Collection ID\",\n info=\"The ID of the Needle collection.\",\n required=True,\n ),\n MessageTextInput(\n name=\"query\",\n display_name=\"User Query\",\n info=\"Enter your question here. In tool mode, you can also specify top_k parameter (min: 20).\",\n required=True,\n tool_mode=True,\n ),\n IntInput(\n name=\"top_k\",\n display_name=\"Top K Results\",\n info=\"Number of search results to return (min: 20).\",\n value=20,\n required=True,\n ),\n ]\n\n outputs = [Output(display_name=\"Result\", name=\"result\", type_=\"Message\", method=\"run\")]\n\n def run(self) -> Message:\n # Extract query and top_k\n query_input = self.query\n actual_query = query_input.get(\"query\", \"\") if isinstance(query_input, dict) else query_input\n\n # Parse top_k from tool input or use default, always enforcing minimum of 20\n try:\n if isinstance(query_input, dict) and \"top_k\" in query_input:\n agent_top_k = query_input.get(\"top_k\")\n # Check if agent_top_k is not None before converting to int\n top_k = max(20, int(agent_top_k)) if agent_top_k is not None else max(20, self.top_k)\n else:\n top_k = max(20, self.top_k)\n except (ValueError, TypeError):\n top_k = max(20, self.top_k)\n\n # Validate required inputs\n if not self.needle_api_key or not self.needle_api_key.strip():\n error_msg = \"The Needle API key cannot be empty.\"\n raise ValueError(error_msg)\n if not self.collection_id or not self.collection_id.strip():\n error_msg = \"The Collection ID cannot be empty.\"\n raise ValueError(error_msg)\n if not actual_query or not actual_query.strip():\n error_msg = \"The query cannot be empty.\"\n raise ValueError(error_msg)\n\n try:\n # Initialize the retriever and get documents\n retriever = NeedleRetriever(\n needle_api_key=self.needle_api_key,\n collection_id=self.collection_id,\n top_k=top_k,\n )\n\n docs = retriever.get_relevant_documents(actual_query)\n\n # Format the response\n if not docs:\n text_content = \"No relevant documents found for the query.\"\n else:\n context = \"\\n\\n\".join([f\"Document {i + 1}:\\n{doc.page_content}\" for i, doc in enumerate(docs)])\n text_content = f\"Question: {actual_query}\\n\\nContext:\\n{context}\"\n\n # Return formatted message\n return Message(\n text=text_content,\n type=\"assistant\",\n sender=MESSAGE_SENDER_AI,\n additional_kwargs={\n \"source_documents\": [{\"page_content\": doc.page_content, \"metadata\": doc.metadata} for doc in docs],\n \"top_k_used\": top_k,\n },\n )\n\n except Exception as e:\n error_msg = f\"Error processing query: {e!s}\"\n raise ValueError(error_msg) from e\n"
  },
  "collection_id": {
  "_input_type": "MessageTextInput",
@@ -907,17 +907,17 @@
  "key": "ChatInput",
  "legacy": false,
  "metadata": {
- "code_hash": "192913db3453",
+ "code_hash": "715a37648834",
  "dependencies": {
  "dependencies": [
  {
- "name": "langflow",
+ "name": "lfx",
  "version": null
  }
  ],
  "total_dependencies": 1
  },
- "module": "langflow.components.input_output.chat.ChatInput"
+ "module": "lfx.components.input_output.chat.ChatInput"
  },
  "minimized": true,
  "output_types": [],
@@ -1003,7 +1003,7 @@
  "show": true,
  "title_case": false,
  "type": "code",
- "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n documentation: str = \"https://docs.langflow.org/components-io#chat-input\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Input Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Chat Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
+ "value": "from lfx.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom lfx.base.io.chat import ChatComponent\nfrom lfx.inputs.inputs import BoolInput\nfrom lfx.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom lfx.schema.message import Message\nfrom lfx.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n documentation: str = \"https://docs.langflow.org/components-io#chat-input\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Input Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Chat Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
  },
  "files": {
  "_input_type": "FileInput",
@@ -1242,7 +1242,27 @@
  "icon": "bot",
  "key": "Agent",
  "legacy": false,
- "metadata": {},
+ "metadata": {
+ "code_hash": "1a4bc0f629fe",
+ "dependencies": {
+ "dependencies": [
+ {
+ "name": "langchain_core",
+ "version": "0.3.75"
+ },
+ {
+ "name": "pydantic",
+ "version": "2.10.6"
+ },
+ {
+ "name": "lfx",
+ "version": null
+ }
+ ],
+ "total_dependencies": 3
+ },
+ "module": "lfx.components.agents.agent.AgentComponent"
+ },
  "minimized": false,
  "output_types": [],
  "outputs": [
@@ -1367,7 +1387,7 @@
  "password": true,
  "placeholder": "",
  "real_time_refresh": true,
- "required": true,
+ "required": false,
  "show": true,
  "title_case": false,
  "type": "str",
@@ -1389,7 +1409,32 @@
  "show": true,
  "title_case": false,
  "type": "code",
- "value": "import json\nimport re\n\nfrom langchain_core.tools import StructuredTool\nfrom pydantic import ValidationError\n\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.base.agents.events import ExceptionWithMessageError\nfrom langflow.base.models.model_input_constants import (\n ALL_PROVIDER_FIELDS,\n MODEL_DYNAMIC_UPDATE_FIELDS,\n MODEL_PROVIDERS,\n MODEL_PROVIDERS_DICT,\n MODELS_METADATA,\n)\nfrom langflow.base.models.model_utils import get_model_name\nfrom langflow.components.helpers.current_date import CurrentDateComponent\nfrom langflow.components.helpers.memory import MemoryComponent\nfrom langflow.components.langchain_utilities.tool_calling import ToolCallingAgentComponent\nfrom langflow.custom.custom_component.component import _get_component_toolkit\nfrom langflow.custom.utils import update_component_build_config\nfrom langflow.field_typing import Tool\nfrom langflow.helpers.base_model import build_model_from_schema\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MultilineInput, Output, TableInput\nfrom langflow.logging import logger\nfrom langflow.schema.data import Data\nfrom langflow.schema.dotdict import dotdict\nfrom langflow.schema.message import Message\nfrom langflow.schema.table import EditMode\n\n\ndef set_advanced_true(component_input):\n component_input.advanced = True\n return component_input\n\n\nMODEL_PROVIDERS_LIST = [\"Anthropic\", \"Google Generative AI\", \"Groq\", \"OpenAI\"]\n\n\nclass AgentComponent(ToolCallingAgentComponent):\n display_name: str = \"Agent\"\n description: str = \"Define the agent's instructions, then enter a task to complete using tools.\"\n documentation: str = \"https://docs.langflow.org/agents\"\n icon = \"bot\"\n beta = False\n name = \"Agent\"\n\n memory_inputs = [set_advanced_true(component_input) for component_input in MemoryComponent().inputs]\n\n # Filter out json_mode from OpenAI inputs since we handle structured output differently\n openai_inputs_filtered = [\n input_field\n for input_field in MODEL_PROVIDERS_DICT[\"OpenAI\"][\"inputs\"]\n if not (hasattr(input_field, \"name\") and input_field.name == \"json_mode\")\n ]\n\n inputs = [\n DropdownInput(\n name=\"agent_llm\",\n display_name=\"Model Provider\",\n info=\"The provider of the language model that the agent will use to generate responses.\",\n options=[*MODEL_PROVIDERS_LIST, \"Custom\"],\n value=\"OpenAI\",\n real_time_refresh=True,\n input_types=[],\n options_metadata=[MODELS_METADATA[key] for key in MODEL_PROVIDERS_LIST] + [{\"icon\": \"brain\"}],\n ),\n *openai_inputs_filtered,\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"Agent Instructions\",\n info=\"System Prompt: Initial instructions and context provided to guide the agent's behavior.\",\n value=\"You are a helpful assistant that can use tools to answer questions and perform tasks.\",\n advanced=False,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Chat History Messages\",\n value=100,\n info=\"Number of chat history messages to retrieve.\",\n advanced=True,\n show=True,\n ),\n MultilineInput(\n name=\"format_instructions\",\n display_name=\"Output Format Instructions\",\n info=\"Generic Template for structured output formatting. Valid only with Structured response.\",\n value=(\n \"You are an AI that extracts structured JSON objects from unstructured text. \"\n \"Use a predefined schema with expected types (str, int, float, bool, dict). 
\"\n \"Extract ALL relevant instances that match the schema - if multiple patterns exist, capture them all. \"\n \"Fill missing or ambiguous values with defaults: null for missing values. \"\n \"Remove exact duplicates but keep variations that have different field values. \"\n \"Always return valid JSON in the expected format, never throw errors. \"\n \"If multiple objects can be extracted, return them all in the structured format.\"\n ),\n advanced=True,\n ),\n TableInput(\n name=\"output_schema\",\n display_name=\"Output Schema\",\n info=(\n \"Schema Validation: Define the structure and data types for structured output. \"\n \"No validation if no output schema.\"\n ),\n advanced=True,\n required=False,\n value=[],\n table_schema=[\n {\n \"name\": \"name\",\n \"display_name\": \"Name\",\n \"type\": \"str\",\n \"description\": \"Specify the name of the output field.\",\n \"default\": \"field\",\n \"edit_mode\": EditMode.INLINE,\n },\n {\n \"name\": \"description\",\n \"display_name\": \"Description\",\n \"type\": \"str\",\n \"description\": \"Describe the purpose of the output field.\",\n \"default\": \"description of field\",\n \"edit_mode\": EditMode.POPOVER,\n },\n {\n \"name\": \"type\",\n \"display_name\": \"Type\",\n \"type\": \"str\",\n \"edit_mode\": EditMode.INLINE,\n \"description\": (\"Indicate the data type of the output field (e.g., str, int, float, bool, dict).\"),\n \"options\": [\"str\", \"int\", \"float\", \"bool\", \"dict\"],\n \"default\": \"str\",\n },\n {\n \"name\": \"multiple\",\n \"display_name\": \"As List\",\n \"type\": \"boolean\",\n \"description\": \"Set to True if this output field should be a list of the specified type.\",\n \"default\": \"False\",\n \"edit_mode\": EditMode.INLINE,\n },\n ],\n ),\n *LCToolsAgentComponent._base_inputs,\n # removed memory inputs from agent component\n # *memory_inputs,\n BoolInput(\n name=\"add_current_date_tool\",\n display_name=\"Current Date\",\n advanced=True,\n info=\"If true, will add a tool to the agent that returns the current date.\",\n value=True,\n ),\n ]\n outputs = [\n Output(name=\"response\", display_name=\"Response\", method=\"message_response\"),\n Output(name=\"structured_response\", display_name=\"Structured Response\", method=\"json_response\", tool_mode=False),\n ]\n\n async def get_agent_requirements(self):\n \"\"\"Get the agent requirements for the agent.\"\"\"\n llm_model, display_name = await self.get_llm()\n if llm_model is None:\n msg = \"No language model selected. 
Please choose a model to proceed.\"\n raise ValueError(msg)\n self.model_name = get_model_name(llm_model, display_name=display_name)\n\n # Get memory data\n self.chat_history = await self.get_memory_data()\n if isinstance(self.chat_history, Message):\n self.chat_history = [self.chat_history]\n\n # Add current date tool if enabled\n if self.add_current_date_tool:\n if not isinstance(self.tools, list): # type: ignore[has-type]\n self.tools = []\n current_date_tool = (await CurrentDateComponent(**self.get_base_args()).to_toolkit()).pop(0)\n if not isinstance(current_date_tool, StructuredTool):\n msg = \"CurrentDateComponent must be converted to a StructuredTool\"\n raise TypeError(msg)\n self.tools.append(current_date_tool)\n return llm_model, self.chat_history, self.tools\n\n async def message_response(self) -> Message:\n try:\n llm_model, self.chat_history, self.tools = await self.get_agent_requirements()\n # Set up and run agent\n self.set(\n llm=llm_model,\n tools=self.tools or [],\n chat_history=self.chat_history,\n input_value=self.input_value,\n system_prompt=self.system_prompt,\n )\n agent = self.create_agent_runnable()\n result = await self.run_agent(agent)\n\n # Store result for potential JSON output\n self._agent_result = result\n\n except (ValueError, TypeError, KeyError) as e:\n await logger.aerror(f\"{type(e).__name__}: {e!s}\")\n raise\n except ExceptionWithMessageError as e:\n await logger.aerror(f\"ExceptionWithMessageError occurred: {e}\")\n raise\n # Avoid catching blind Exception; let truly unexpected exceptions propagate\n except Exception as e:\n await logger.aerror(f\"Unexpected error: {e!s}\")\n raise\n else:\n return result\n\n def _preprocess_schema(self, schema):\n \"\"\"Preprocess schema to ensure correct data types for build_model_from_schema.\"\"\"\n processed_schema = []\n for field in schema:\n processed_field = {\n \"name\": str(field.get(\"name\", \"field\")),\n \"type\": str(field.get(\"type\", \"str\")),\n \"description\": str(field.get(\"description\", \"\")),\n \"multiple\": field.get(\"multiple\", False),\n }\n # Ensure multiple is handled correctly\n if isinstance(processed_field[\"multiple\"], str):\n processed_field[\"multiple\"] = processed_field[\"multiple\"].lower() in [\"true\", \"1\", \"t\", \"y\", \"yes\"]\n processed_schema.append(processed_field)\n return processed_schema\n\n async def build_structured_output_base(self, content: str):\n \"\"\"Build structured output with optional BaseModel validation.\"\"\"\n json_pattern = r\"\\{.*\\}\"\n schema_error_msg = \"Try setting an output schema\"\n\n # Try to parse content as JSON first\n json_data = None\n try:\n json_data = json.loads(content)\n except json.JSONDecodeError:\n json_match = re.search(json_pattern, content, re.DOTALL)\n if json_match:\n try:\n json_data = json.loads(json_match.group())\n except json.JSONDecodeError:\n return {\"content\": content, \"error\": schema_error_msg}\n else:\n return {\"content\": content, \"error\": schema_error_msg}\n\n # If no output schema provided, return parsed JSON without validation\n if not hasattr(self, \"output_schema\") or not self.output_schema or len(self.output_schema) == 0:\n return json_data\n\n # Use BaseModel validation with schema\n try:\n processed_schema = self._preprocess_schema(self.output_schema)\n output_model = build_model_from_schema(processed_schema)\n\n # Validate against the schema\n if isinstance(json_data, list):\n # Multiple objects\n validated_objects = []\n for item in json_data:\n try:\n validated_obj = 
output_model.model_validate(item)\n validated_objects.append(validated_obj.model_dump())\n except ValidationError as e:\n await logger.aerror(f\"Validation error for item: {e}\")\n # Include invalid items with error info\n validated_objects.append({\"data\": item, \"validation_error\": str(e)})\n return validated_objects\n\n # Single object\n try:\n validated_obj = output_model.model_validate(json_data)\n return [validated_obj.model_dump()] # Return as list for consistency\n except ValidationError as e:\n await logger.aerror(f\"Validation error: {e}\")\n return [{\"data\": json_data, \"validation_error\": str(e)}]\n\n except (TypeError, ValueError) as e:\n await logger.aerror(f\"Error building structured output: {e}\")\n # Fallback to parsed JSON without validation\n return json_data\n\n async def json_response(self) -> Data:\n \"\"\"Convert agent response to structured JSON Data output with schema validation.\"\"\"\n # Always use structured chat agent for JSON response mode for better JSON formatting\n try:\n system_components = []\n\n # 1. Agent Instructions (system_prompt)\n agent_instructions = getattr(self, \"system_prompt\", \"\") or \"\"\n if agent_instructions:\n system_components.append(f\"{agent_instructions}\")\n\n # 2. Format Instructions\n format_instructions = getattr(self, \"format_instructions\", \"\") or \"\"\n if format_instructions:\n system_components.append(f\"Format instructions: {format_instructions}\")\n\n # 3. Schema Information from BaseModel\n if hasattr(self, \"output_schema\") and self.output_schema and len(self.output_schema) > 0:\n try:\n processed_schema = self._preprocess_schema(self.output_schema)\n output_model = build_model_from_schema(processed_schema)\n schema_dict = output_model.model_json_schema()\n schema_info = (\n \"You are given some text that may include format instructions, \"\n \"explanations, or other content alongside a JSON schema.\\n\\n\"\n \"Your task:\\n\"\n \"- Extract only the JSON schema.\\n\"\n \"- Return it as valid JSON.\\n\"\n \"- Do not include format instructions, explanations, or extra text.\\n\\n\"\n \"Input:\\n\"\n f\"{json.dumps(schema_dict, indent=2)}\\n\\n\"\n \"Output (only JSON schema):\"\n )\n system_components.append(schema_info)\n except (ValidationError, ValueError, TypeError, KeyError) as e:\n await logger.aerror(f\"Could not build schema for prompt: {e}\", exc_info=True)\n\n # Combine all components\n combined_instructions = \"\\n\\n\".join(system_components) if system_components else \"\"\n llm_model, self.chat_history, self.tools = await self.get_agent_requirements()\n self.set(\n llm=llm_model,\n tools=self.tools or [],\n chat_history=self.chat_history,\n input_value=self.input_value,\n system_prompt=combined_instructions,\n )\n\n # Create and run structured chat agent\n try:\n structured_agent = self.create_agent_runnable()\n except (NotImplementedError, ValueError, TypeError) as e:\n await logger.aerror(f\"Error with structured chat agent: {e}\")\n raise\n try:\n result = await self.run_agent(structured_agent)\n except (ExceptionWithMessageError, ValueError, TypeError, RuntimeError) as e:\n await logger.aerror(f\"Error with structured agent result: {e}\")\n raise\n # Extract content from structured agent result\n if hasattr(result, \"content\"):\n content = result.content\n elif hasattr(result, \"text\"):\n content = result.text\n else:\n content = str(result)\n\n except (ExceptionWithMessageError, ValueError, TypeError, NotImplementedError, AttributeError) as e:\n await logger.aerror(f\"Error with structured 
chat agent: {e}\")\n # Fallback to regular agent\n content_str = \"No content returned from agent\"\n return Data(data={\"content\": content_str, \"error\": str(e)})\n\n # Process with structured output validation\n try:\n structured_output = await self.build_structured_output_base(content)\n\n # Handle different output formats\n if isinstance(structured_output, list) and structured_output:\n if len(structured_output) == 1:\n return Data(data=structured_output[0])\n return Data(data={\"results\": structured_output})\n if isinstance(structured_output, dict):\n return Data(data=structured_output)\n return Data(data={\"content\": content})\n\n except (ValueError, TypeError) as e:\n await logger.aerror(f\"Error in structured output processing: {e}\")\n return Data(data={\"content\": content, \"error\": str(e)})\n\n async def get_memory_data(self):\n # TODO: This is a temporary fix to avoid message duplication. We should develop a function for this.\n messages = (\n await MemoryComponent(**self.get_base_args())\n .set(session_id=self.graph.session_id, order=\"Ascending\", n_messages=self.n_messages)\n .retrieve_messages()\n )\n return [\n message for message in messages if getattr(message, \"id\", None) != getattr(self.input_value, \"id\", None)\n ]\n\n async def get_llm(self):\n if not isinstance(self.agent_llm, str):\n return self.agent_llm, None\n\n try:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if not provider_info:\n msg = f\"Invalid model provider: {self.agent_llm}\"\n raise ValueError(msg)\n\n component_class = provider_info.get(\"component_class\")\n display_name = component_class.display_name\n inputs = provider_info.get(\"inputs\")\n prefix = provider_info.get(\"prefix\", \"\")\n\n return self._build_llm_model(component_class, inputs, prefix), display_name\n\n except (AttributeError, ValueError, TypeError, RuntimeError) as e:\n await logger.aerror(f\"Error building {self.agent_llm} language model: {e!s}\")\n msg = f\"Failed to initialize language model: {e!s}\"\n raise ValueError(msg) from e\n\n def _build_llm_model(self, component, inputs, prefix=\"\"):\n model_kwargs = {}\n for input_ in inputs:\n if hasattr(self, f\"{prefix}{input_.name}\"):\n model_kwargs[input_.name] = getattr(self, f\"{prefix}{input_.name}\")\n return component.set(**model_kwargs).build_model()\n\n def set_component_params(self, component):\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n inputs = provider_info.get(\"inputs\")\n prefix = provider_info.get(\"prefix\")\n # Filter out json_mode and only use attributes that exist on this component\n model_kwargs = {}\n for input_ in inputs:\n if hasattr(self, f\"{prefix}{input_.name}\"):\n model_kwargs[input_.name] = getattr(self, f\"{prefix}{input_.name}\")\n\n return component.set(**model_kwargs)\n return component\n\n def delete_fields(self, build_config: dotdict, fields: dict | list[str]) -> None:\n \"\"\"Delete specified fields from build_config.\"\"\"\n for field in fields:\n build_config.pop(field, None)\n\n def update_input_types(self, build_config: dotdict) -> dotdict:\n \"\"\"Update input types for all fields in build_config.\"\"\"\n for key, value in build_config.items():\n if isinstance(value, dict):\n if value.get(\"input_types\") is None:\n build_config[key][\"input_types\"] = []\n elif hasattr(value, \"input_types\") and value.input_types is None:\n value.input_types = []\n return build_config\n\n async def update_build_config(\n self, build_config: dotdict, field_value: str, field_name: str | None = 
None\n ) -> dotdict:\n # Iterate over all providers in the MODEL_PROVIDERS_DICT\n # Existing logic for updating build_config\n if field_name in (\"agent_llm\",):\n build_config[\"agent_llm\"][\"value\"] = field_value\n provider_info = MODEL_PROVIDERS_DICT.get(field_value)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call the component class's update_build_config method\n build_config = await update_component_build_config(\n component_class, build_config, field_value, \"model_name\"\n )\n\n provider_configs: dict[str, tuple[dict, list[dict]]] = {\n provider: (\n MODEL_PROVIDERS_DICT[provider][\"fields\"],\n [\n MODEL_PROVIDERS_DICT[other_provider][\"fields\"]\n for other_provider in MODEL_PROVIDERS_DICT\n if other_provider != provider\n ],\n )\n for provider in MODEL_PROVIDERS_DICT\n }\n if field_value in provider_configs:\n fields_to_add, fields_to_delete = provider_configs[field_value]\n\n # Delete fields from other providers\n for fields in fields_to_delete:\n self.delete_fields(build_config, fields)\n\n # Add provider-specific fields\n if field_value == \"OpenAI\" and not any(field in build_config for field in fields_to_add):\n build_config.update(fields_to_add)\n else:\n build_config.update(fields_to_add)\n # Reset input types for agent_llm\n build_config[\"agent_llm\"][\"input_types\"] = []\n build_config[\"agent_llm\"][\"display_name\"] = \"Model Provider\"\n elif field_value == \"Custom\":\n # Delete all provider fields\n self.delete_fields(build_config, ALL_PROVIDER_FIELDS)\n # Update with custom component\n custom_component = DropdownInput(\n name=\"agent_llm\",\n display_name=\"Language Model\",\n options=[*sorted(MODEL_PROVIDERS), \"Custom\"],\n value=\"Custom\",\n real_time_refresh=True,\n input_types=[\"LanguageModel\"],\n options_metadata=[MODELS_METADATA[key] for key in sorted(MODELS_METADATA.keys())]\n + [{\"icon\": \"brain\"}],\n )\n build_config.update({\"agent_llm\": custom_component.to_dict()})\n # Update input types for all fields\n build_config = self.update_input_types(build_config)\n\n # Validate required keys\n default_keys = [\n \"code\",\n \"_type\",\n \"agent_llm\",\n \"tools\",\n \"input_value\",\n \"add_current_date_tool\",\n \"system_prompt\",\n \"agent_description\",\n \"max_iterations\",\n \"handle_parsing_errors\",\n \"verbose\",\n ]\n missing_keys = [key for key in default_keys if key not in build_config]\n if missing_keys:\n msg = f\"Missing required keys in build_config: {missing_keys}\"\n raise ValueError(msg)\n if (\n isinstance(self.agent_llm, str)\n and self.agent_llm in MODEL_PROVIDERS_DICT\n and field_name in MODEL_DYNAMIC_UPDATE_FIELDS\n ):\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n component_class = self.set_component_params(component_class)\n prefix = provider_info.get(\"prefix\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call each component class's update_build_config method\n # remove the prefix from the field_name\n if isinstance(field_name, str) and isinstance(prefix, str):\n field_name = field_name.replace(prefix, \"\")\n build_config = await update_component_build_config(\n component_class, build_config, field_value, \"model_name\"\n )\n return dotdict({k: v.to_dict() if hasattr(v, \"to_dict\") else v for k, v in build_config.items()})\n\n async def _get_tools(self) -> list[Tool]:\n 
component_toolkit = _get_component_toolkit()\n tools_names = self._build_tools_names()\n agent_description = self.get_tool_description()\n # TODO: Agent Description Depreciated Feature to be removed\n description = f\"{agent_description}{tools_names}\"\n tools = component_toolkit(component=self).get_tools(\n tool_name=\"Call_Agent\", tool_description=description, callbacks=self.get_langchain_callbacks()\n )\n if hasattr(self, \"tools_metadata\"):\n tools = component_toolkit(component=self, metadata=self.tools_metadata).update_tools_metadata(tools=tools)\n return tools\n"
+ "value": "import json\nimport re\n\nfrom langchain_core.tools import StructuredTool, Tool\nfrom pydantic import ValidationError\n\nfrom lfx.base.agents.agent import LCToolsAgentComponent\nfrom lfx.base.agents.events import ExceptionWithMessageError\nfrom lfx.base.models.model_input_constants import (\n ALL_PROVIDER_FIELDS,\n MODEL_DYNAMIC_UPDATE_FIELDS,\n MODEL_PROVIDERS,\n MODEL_PROVIDERS_DICT,\n MODELS_METADATA,\n)\nfrom lfx.base.models.model_utils import get_model_name\nfrom lfx.components.helpers.current_date import CurrentDateComponent\nfrom lfx.components.helpers.memory import MemoryComponent\nfrom lfx.components.langchain_utilities.tool_calling import ToolCallingAgentComponent\nfrom lfx.custom.custom_component.component import get_component_toolkit\nfrom lfx.custom.utils import update_component_build_config\nfrom lfx.helpers.base_model import build_model_from_schema\nfrom lfx.inputs.inputs import TableInput\nfrom lfx.io import BoolInput, DropdownInput, IntInput, MultilineInput, Output\nfrom lfx.log.logger import logger\nfrom lfx.schema.data import Data\nfrom lfx.schema.dotdict import dotdict\nfrom lfx.schema.message import Message\nfrom lfx.schema.table import EditMode\n\n\ndef set_advanced_true(component_input):\n component_input.advanced = True\n return component_input\n\n\nMODEL_PROVIDERS_LIST = [\"Anthropic\", \"Google Generative AI\", \"Groq\", \"OpenAI\"]\n\n\nclass AgentComponent(ToolCallingAgentComponent):\n display_name: str = \"Agent\"\n description: str = \"Define the agent's instructions, then enter a task to complete using tools.\"\n documentation: str = \"https://docs.langflow.org/agents\"\n icon = \"bot\"\n beta = False\n name = \"Agent\"\n\n memory_inputs = [set_advanced_true(component_input) for component_input in MemoryComponent().inputs]\n\n # Filter out json_mode from OpenAI inputs since we handle structured output differently\n if \"OpenAI\" in MODEL_PROVIDERS_DICT:\n openai_inputs_filtered = [\n input_field\n for input_field in MODEL_PROVIDERS_DICT[\"OpenAI\"][\"inputs\"]\n if not (hasattr(input_field, \"name\") and input_field.name == \"json_mode\")\n ]\n else:\n openai_inputs_filtered = []\n\n inputs = [\n DropdownInput(\n name=\"agent_llm\",\n display_name=\"Model Provider\",\n info=\"The provider of the language model that the agent will use to generate responses.\",\n options=[*MODEL_PROVIDERS_LIST, \"Custom\"],\n value=\"OpenAI\",\n real_time_refresh=True,\n input_types=[],\n options_metadata=[MODELS_METADATA[key] for key in MODEL_PROVIDERS_LIST if key in MODELS_METADATA]\n + [{\"icon\": \"brain\"}],\n ),\n *openai_inputs_filtered,\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"Agent Instructions\",\n info=\"System Prompt: Initial instructions and context provided to guide the agent's behavior.\",\n value=\"You are a helpful assistant that can use tools to answer questions and perform tasks.\",\n advanced=False,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Chat History Messages\",\n value=100,\n info=\"Number of chat history messages to retrieve.\",\n advanced=True,\n show=True,\n ),\n MultilineInput(\n name=\"format_instructions\",\n display_name=\"Output Format Instructions\",\n info=\"Generic Template for structured output formatting. Valid only with Structured response.\",\n value=(\n \"You are an AI that extracts structured JSON objects from unstructured text. \"\n \"Use a predefined schema with expected types (str, int, float, bool, dict). 
\"\n \"Extract ALL relevant instances that match the schema - if multiple patterns exist, capture them all. \"\n \"Fill missing or ambiguous values with defaults: null for missing values. \"\n \"Remove exact duplicates but keep variations that have different field values. \"\n \"Always return valid JSON in the expected format, never throw errors. \"\n \"If multiple objects can be extracted, return them all in the structured format.\"\n ),\n advanced=True,\n ),\n TableInput(\n name=\"output_schema\",\n display_name=\"Output Schema\",\n info=(\n \"Schema Validation: Define the structure and data types for structured output. \"\n \"No validation if no output schema.\"\n ),\n advanced=True,\n required=False,\n value=[],\n table_schema=[\n {\n \"name\": \"name\",\n \"display_name\": \"Name\",\n \"type\": \"str\",\n \"description\": \"Specify the name of the output field.\",\n \"default\": \"field\",\n \"edit_mode\": EditMode.INLINE,\n },\n {\n \"name\": \"description\",\n \"display_name\": \"Description\",\n \"type\": \"str\",\n \"description\": \"Describe the purpose of the output field.\",\n \"default\": \"description of field\",\n \"edit_mode\": EditMode.POPOVER,\n },\n {\n \"name\": \"type\",\n \"display_name\": \"Type\",\n \"type\": \"str\",\n \"edit_mode\": EditMode.INLINE,\n \"description\": (\"Indicate the data type of the output field (e.g., str, int, float, bool, dict).\"),\n \"options\": [\"str\", \"int\", \"float\", \"bool\", \"dict\"],\n \"default\": \"str\",\n },\n {\n \"name\": \"multiple\",\n \"display_name\": \"As List\",\n \"type\": \"boolean\",\n \"description\": \"Set to True if this output field should be a list of the specified type.\",\n \"default\": \"False\",\n \"edit_mode\": EditMode.INLINE,\n },\n ],\n ),\n *LCToolsAgentComponent.get_base_inputs(),\n # removed memory inputs from agent component\n # *memory_inputs,\n BoolInput(\n name=\"add_current_date_tool\",\n display_name=\"Current Date\",\n advanced=True,\n info=\"If true, will add a tool to the agent that returns the current date.\",\n value=True,\n ),\n ]\n outputs = [\n Output(name=\"response\", display_name=\"Response\", method=\"message_response\"),\n Output(name=\"structured_response\", display_name=\"Structured Response\", method=\"json_response\", tool_mode=False),\n ]\n\n async def get_agent_requirements(self):\n \"\"\"Get the agent requirements for the agent.\"\"\"\n llm_model, display_name = await self.get_llm()\n if llm_model is None:\n msg = \"No language model selected. 
Please choose a model to proceed.\"\n raise ValueError(msg)\n self.model_name = get_model_name(llm_model, display_name=display_name)\n\n # Get memory data\n self.chat_history = await self.get_memory_data()\n if isinstance(self.chat_history, Message):\n self.chat_history = [self.chat_history]\n\n # Add current date tool if enabled\n if self.add_current_date_tool:\n if not isinstance(self.tools, list): # type: ignore[has-type]\n self.tools = []\n current_date_tool = (CurrentDateComponent(**self.get_base_args()).to_toolkit()).pop(0)\n if not isinstance(current_date_tool, StructuredTool):\n msg = \"CurrentDateComponent must be converted to a StructuredTool\"\n raise TypeError(msg)\n self.tools.append(current_date_tool)\n return llm_model, self.chat_history, self.tools\n\n async def message_response(self) -> Message:\n try:\n llm_model, self.chat_history, self.tools = await self.get_agent_requirements()\n # Set up and run agent\n self.set(\n llm=llm_model,\n tools=self.tools or [],\n chat_history=self.chat_history,\n input_value=self.input_value,\n system_prompt=self.system_prompt,\n )\n agent = self.create_agent_runnable()\n result = await self.run_agent(agent)\n\n # Store result for potential JSON output\n self._agent_result = result\n\n except (ValueError, TypeError, KeyError) as e:\n await logger.aerror(f\"{type(e).__name__}: {e!s}\")\n raise\n except ExceptionWithMessageError as e:\n await logger.aerror(f\"ExceptionWithMessageError occurred: {e}\")\n raise\n # Avoid catching blind Exception; let truly unexpected exceptions propagate\n except Exception as e:\n await logger.aerror(f\"Unexpected error: {e!s}\")\n raise\n else:\n return result\n\n def _preprocess_schema(self, schema):\n \"\"\"Preprocess schema to ensure correct data types for build_model_from_schema.\"\"\"\n processed_schema = []\n for field in schema:\n processed_field = {\n \"name\": str(field.get(\"name\", \"field\")),\n \"type\": str(field.get(\"type\", \"str\")),\n \"description\": str(field.get(\"description\", \"\")),\n \"multiple\": field.get(\"multiple\", False),\n }\n # Ensure multiple is handled correctly\n if isinstance(processed_field[\"multiple\"], str):\n processed_field[\"multiple\"] = processed_field[\"multiple\"].lower() in [\"true\", \"1\", \"t\", \"y\", \"yes\"]\n processed_schema.append(processed_field)\n return processed_schema\n\n async def build_structured_output_base(self, content: str):\n \"\"\"Build structured output with optional BaseModel validation.\"\"\"\n json_pattern = r\"\\{.*\\}\"\n schema_error_msg = \"Try setting an output schema\"\n\n # Try to parse content as JSON first\n json_data = None\n try:\n json_data = json.loads(content)\n except json.JSONDecodeError:\n json_match = re.search(json_pattern, content, re.DOTALL)\n if json_match:\n try:\n json_data = json.loads(json_match.group())\n except json.JSONDecodeError:\n return {\"content\": content, \"error\": schema_error_msg}\n else:\n return {\"content\": content, \"error\": schema_error_msg}\n\n # If no output schema provided, return parsed JSON without validation\n if not hasattr(self, \"output_schema\") or not self.output_schema or len(self.output_schema) == 0:\n return json_data\n\n # Use BaseModel validation with schema\n try:\n processed_schema = self._preprocess_schema(self.output_schema)\n output_model = build_model_from_schema(processed_schema)\n\n # Validate against the schema\n if isinstance(json_data, list):\n # Multiple objects\n validated_objects = []\n for item in json_data:\n try:\n validated_obj = 
output_model.model_validate(item)\n validated_objects.append(validated_obj.model_dump())\n except ValidationError as e:\n await logger.aerror(f\"Validation error for item: {e}\")\n # Include invalid items with error info\n validated_objects.append({\"data\": item, \"validation_error\": str(e)})\n return validated_objects\n\n # Single object\n try:\n validated_obj = output_model.model_validate(json_data)\n return [validated_obj.model_dump()] # Return as list for consistency\n except ValidationError as e:\n await logger.aerror(f\"Validation error: {e}\")\n return [{\"data\": json_data, \"validation_error\": str(e)}]\n\n except (TypeError, ValueError) as e:\n await logger.aerror(f\"Error building structured output: {e}\")\n # Fallback to parsed JSON without validation\n return json_data\n\n async def json_response(self) -> Data:\n \"\"\"Convert agent response to structured JSON Data output with schema validation.\"\"\"\n # Always use structured chat agent for JSON response mode for better JSON formatting\n try:\n system_components = []\n\n # 1. Agent Instructions (system_prompt)\n agent_instructions = getattr(self, \"system_prompt\", \"\") or \"\"\n if agent_instructions:\n system_components.append(f\"{agent_instructions}\")\n\n # 2. Format Instructions\n format_instructions = getattr(self, \"format_instructions\", \"\") or \"\"\n if format_instructions:\n system_components.append(f\"Format instructions: {format_instructions}\")\n\n # 3. Schema Information from BaseModel\n if hasattr(self, \"output_schema\") and self.output_schema and len(self.output_schema) > 0:\n try:\n processed_schema = self._preprocess_schema(self.output_schema)\n output_model = build_model_from_schema(processed_schema)\n schema_dict = output_model.model_json_schema()\n schema_info = (\n \"You are given some text that may include format instructions, \"\n \"explanations, or other content alongside a JSON schema.\\n\\n\"\n \"Your task:\\n\"\n \"- Extract only the JSON schema.\\n\"\n \"- Return it as valid JSON.\\n\"\n \"- Do not include format instructions, explanations, or extra text.\\n\\n\"\n \"Input:\\n\"\n f\"{json.dumps(schema_dict, indent=2)}\\n\\n\"\n \"Output (only JSON schema):\"\n )\n system_components.append(schema_info)\n except (ValidationError, ValueError, TypeError, KeyError) as e:\n await logger.aerror(f\"Could not build schema for prompt: {e}\", exc_info=True)\n\n # Combine all components\n combined_instructions = \"\\n\\n\".join(system_components) if system_components else \"\"\n llm_model, self.chat_history, self.tools = await self.get_agent_requirements()\n self.set(\n llm=llm_model,\n tools=self.tools or [],\n chat_history=self.chat_history,\n input_value=self.input_value,\n system_prompt=combined_instructions,\n )\n\n # Create and run structured chat agent\n try:\n structured_agent = self.create_agent_runnable()\n except (NotImplementedError, ValueError, TypeError) as e:\n await logger.aerror(f\"Error with structured chat agent: {e}\")\n raise\n try:\n result = await self.run_agent(structured_agent)\n except (ExceptionWithMessageError, ValueError, TypeError, RuntimeError) as e:\n await logger.aerror(f\"Error with structured agent result: {e}\")\n raise\n # Extract content from structured agent result\n if hasattr(result, \"content\"):\n content = result.content\n elif hasattr(result, \"text\"):\n content = result.text\n else:\n content = str(result)\n\n except (ExceptionWithMessageError, ValueError, TypeError, NotImplementedError, AttributeError) as e:\n await logger.aerror(f\"Error with structured 
chat agent: {e}\")\n # Fallback to regular agent\n content_str = \"No content returned from agent\"\n return Data(data={\"content\": content_str, \"error\": str(e)})\n\n # Process with structured output validation\n try:\n structured_output = await self.build_structured_output_base(content)\n\n # Handle different output formats\n if isinstance(structured_output, list) and structured_output:\n if len(structured_output) == 1:\n return Data(data=structured_output[0])\n return Data(data={\"results\": structured_output})\n if isinstance(structured_output, dict):\n return Data(data=structured_output)\n return Data(data={\"content\": content})\n\n except (ValueError, TypeError) as e:\n await logger.aerror(f\"Error in structured output processing: {e}\")\n return Data(data={\"content\": content, \"error\": str(e)})\n\n async def get_memory_data(self):\n # TODO: This is a temporary fix to avoid message duplication. We should develop a function for this.\n messages = (\n await MemoryComponent(**self.get_base_args())\n .set(session_id=self.graph.session_id, order=\"Ascending\", n_messages=self.n_messages)\n .retrieve_messages()\n )\n return [\n message for message in messages if getattr(message, \"id\", None) != getattr(self.input_value, \"id\", None)\n ]\n\n async def get_llm(self):\n if not isinstance(self.agent_llm, str):\n return self.agent_llm, None\n\n try:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if not provider_info:\n msg = f\"Invalid model provider: {self.agent_llm}\"\n raise ValueError(msg)\n\n component_class = provider_info.get(\"component_class\")\n display_name = component_class.display_name\n inputs = provider_info.get(\"inputs\")\n prefix = provider_info.get(\"prefix\", \"\")\n\n return self._build_llm_model(component_class, inputs, prefix), display_name\n\n except (AttributeError, ValueError, TypeError, RuntimeError) as e:\n await logger.aerror(f\"Error building {self.agent_llm} language model: {e!s}\")\n msg = f\"Failed to initialize language model: {e!s}\"\n raise ValueError(msg) from e\n\n def _build_llm_model(self, component, inputs, prefix=\"\"):\n model_kwargs = {}\n for input_ in inputs:\n if hasattr(self, f\"{prefix}{input_.name}\"):\n model_kwargs[input_.name] = getattr(self, f\"{prefix}{input_.name}\")\n return component.set(**model_kwargs).build_model()\n\n def set_component_params(self, component):\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n inputs = provider_info.get(\"inputs\")\n prefix = provider_info.get(\"prefix\")\n # Filter out json_mode and only use attributes that exist on this component\n model_kwargs = {}\n for input_ in inputs:\n if hasattr(self, f\"{prefix}{input_.name}\"):\n model_kwargs[input_.name] = getattr(self, f\"{prefix}{input_.name}\")\n\n return component.set(**model_kwargs)\n return component\n\n def delete_fields(self, build_config: dotdict, fields: dict | list[str]) -> None:\n \"\"\"Delete specified fields from build_config.\"\"\"\n for field in fields:\n build_config.pop(field, None)\n\n def update_input_types(self, build_config: dotdict) -> dotdict:\n \"\"\"Update input types for all fields in build_config.\"\"\"\n for key, value in build_config.items():\n if isinstance(value, dict):\n if value.get(\"input_types\") is None:\n build_config[key][\"input_types\"] = []\n elif hasattr(value, \"input_types\") and value.input_types is None:\n value.input_types = []\n return build_config\n\n async def update_build_config(\n self, build_config: dotdict, field_value: str, field_name: str | None = 
None\n ) -> dotdict:\n # Iterate over all providers in the MODEL_PROVIDERS_DICT\n # Existing logic for updating build_config\n if field_name in (\"agent_llm\",):\n build_config[\"agent_llm\"][\"value\"] = field_value\n provider_info = MODEL_PROVIDERS_DICT.get(field_value)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call the component class's update_build_config method\n build_config = await update_component_build_config(\n component_class, build_config, field_value, \"model_name\"\n )\n\n provider_configs: dict[str, tuple[dict, list[dict]]] = {\n provider: (\n MODEL_PROVIDERS_DICT[provider][\"fields\"],\n [\n MODEL_PROVIDERS_DICT[other_provider][\"fields\"]\n for other_provider in MODEL_PROVIDERS_DICT\n if other_provider != provider\n ],\n )\n for provider in MODEL_PROVIDERS_DICT\n }\n if field_value in provider_configs:\n fields_to_add, fields_to_delete = provider_configs[field_value]\n\n # Delete fields from other providers\n for fields in fields_to_delete:\n self.delete_fields(build_config, fields)\n\n # Add provider-specific fields\n if field_value == \"OpenAI\" and not any(field in build_config for field in fields_to_add):\n build_config.update(fields_to_add)\n else:\n build_config.update(fields_to_add)\n # Reset input types for agent_llm\n build_config[\"agent_llm\"][\"input_types\"] = []\n build_config[\"agent_llm\"][\"display_name\"] = \"Model Provider\"\n elif field_value == \"Custom\":\n # Delete all provider fields\n self.delete_fields(build_config, ALL_PROVIDER_FIELDS)\n # Update with custom component\n custom_component = DropdownInput(\n name=\"agent_llm\",\n display_name=\"Language Model\",\n options=[*sorted(MODEL_PROVIDERS), \"Custom\"],\n value=\"Custom\",\n real_time_refresh=True,\n input_types=[\"LanguageModel\"],\n options_metadata=[MODELS_METADATA[key] for key in sorted(MODELS_METADATA.keys())]\n + [{\"icon\": \"brain\"}],\n )\n build_config.update({\"agent_llm\": custom_component.to_dict()})\n # Update input types for all fields\n build_config = self.update_input_types(build_config)\n\n # Validate required keys\n default_keys = [\n \"code\",\n \"_type\",\n \"agent_llm\",\n \"tools\",\n \"input_value\",\n \"add_current_date_tool\",\n \"system_prompt\",\n \"agent_description\",\n \"max_iterations\",\n \"handle_parsing_errors\",\n \"verbose\",\n ]\n missing_keys = [key for key in default_keys if key not in build_config]\n if missing_keys:\n msg = f\"Missing required keys in build_config: {missing_keys}\"\n raise ValueError(msg)\n if (\n isinstance(self.agent_llm, str)\n and self.agent_llm in MODEL_PROVIDERS_DICT\n and field_name in MODEL_DYNAMIC_UPDATE_FIELDS\n ):\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n component_class = self.set_component_params(component_class)\n prefix = provider_info.get(\"prefix\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call each component class's update_build_config method\n # remove the prefix from the field_name\n if isinstance(field_name, str) and isinstance(prefix, str):\n field_name = field_name.replace(prefix, \"\")\n build_config = await update_component_build_config(\n component_class, build_config, field_value, \"model_name\"\n )\n return dotdict({k: v.to_dict() if hasattr(v, \"to_dict\") else v for k, v in build_config.items()})\n\n def _get_tools(self) -> list[Tool]:\n component_toolkit 
= get_component_toolkit()\n tools_names = self._build_tools_names()\n agent_description = self.get_tool_description()\n # TODO: Agent Description Depreciated Feature to be removed\n description = f\"{agent_description}{tools_names}\"\n tools = component_toolkit(component=self).get_tools(\n tool_name=\"Call_Agent\", tool_description=description, callbacks=self.get_langchain_callbacks()\n )\n if hasattr(self, \"tools_metadata\"):\n tools = component_toolkit(component=self, metadata=self.tools_metadata).update_tools_metadata(tools=tools)\n return tools\n"
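Note: the component code above parses the model reply in build_structured_output_base by trying strict JSON first and then falling back to a greedy "{.*}" regex (with DOTALL) before any schema validation. The sketch below is a minimal, self-contained approximation of just that parsing step; extract_json is an illustrative name, not part of langflow's API, and it returns None where the component instead returns the content together with the "Try setting an output schema" hint.

    import json
    import re

    # Greedy pattern over the whole reply; re.DOTALL lets it span newlines.
    JSON_PATTERN = r"\{.*\}"

    def extract_json(content: str):
        """Illustrative only: return parsed JSON from an agent reply, or None if nothing parses."""
        try:
            return json.loads(content)
        except json.JSONDecodeError:
            match = re.search(JSON_PATTERN, content, re.DOTALL)
            if match is None:
                return None
            try:
                return json.loads(match.group())
            except json.JSONDecodeError:
                return None

    print(extract_json('Sure: {"title": "Dune", "year": 1965} - anything else?'))
    # {'title': 'Dune', 'year': 1965}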
+ },
+ "format_instructions": {
+ "_input_type": "MultilineInput",
+ "advanced": true,
+ "copy_field": false,
+ "display_name": "Output Format Instructions",
+ "dynamic": false,
+ "info": "Generic Template for structured output formatting. Valid only with Structured response.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "list_add_label": "Add More",
+ "load_from_db": false,
+ "multiline": true,
+ "name": "format_instructions",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "tool_mode": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "You are an AI that extracts structured JSON objects from unstructured text. Use a predefined schema with expected types (str, int, float, bool, dict). Extract ALL relevant instances that match the schema - if multiple patterns exist, capture them all. Fill missing or ambiguous values with defaults: null for missing values. Remove exact duplicates but keep variations that have different field values. Always return valid JSON in the expected format, never throw errors. If multiple objects can be extracted, return them all in the structured format."
  },
  "handle_parsing_errors": {
  "_input_type": "BoolInput",
@@ -1432,24 +1477,6 @@
  "type": "str",
  "value": ""
  },
- "json_mode": {
- "_input_type": "BoolInput",
- "advanced": true,
- "display_name": "JSON Mode",
- "dynamic": false,
- "info": "If True, it will output JSON regardless of passing a schema.",
- "list": false,
- "list_add_label": "Add More",
- "name": "json_mode",
- "placeholder": "",
- "required": false,
- "show": true,
- "title_case": false,
- "tool_mode": false,
- "trace_as_metadata": true,
- "type": "bool",
- "value": false
- },
  "max_iterations": {
  "_input_type": "IntInput",
  "advanced": true,
@@ -1543,12 +1570,20 @@
  "gpt-4.1",
  "gpt-4.1-mini",
  "gpt-4.1-nano",
- "gpt-4.5-preview",
  "gpt-4-turbo",
  "gpt-4-turbo-preview",
  "gpt-4",
  "gpt-3.5-turbo",
- "o1"
+ "gpt-5",
+ "gpt-5-mini",
+ "gpt-5-nano",
+ "gpt-5-chat-latest",
+ "o1",
+ "o3-mini",
+ "o3",
+ "o3-pro",
+ "o4-mini",
+ "o4-mini-high"
  ],
  "options_metadata": [],
  "placeholder": "",
@@ -1599,6 +1634,68 @@
  "type": "str",
  "value": ""
  },
+ "output_schema": {
+ "_input_type": "TableInput",
+ "advanced": true,
+ "display_name": "Output Schema",
+ "dynamic": false,
+ "info": "Schema Validation: Define the structure and data types for structured output. No validation if no output schema.",
+ "is_list": true,
+ "list_add_label": "Add More",
+ "name": "output_schema",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "table_icon": "Table",
+ "table_schema": [
+ {
+ "default": "field",
+ "description": "Specify the name of the output field.",
+ "display_name": "Name",
+ "edit_mode": "inline",
+ "name": "name",
+ "type": "str"
+ },
+ {
+ "default": "description of field",
+ "description": "Describe the purpose of the output field.",
+ "display_name": "Description",
+ "edit_mode": "popover",
+ "name": "description",
+ "type": "str"
+ },
+ {
+ "default": "str",
+ "description": "Indicate the data type of the output field (e.g., str, int, float, bool, dict).",
+ "display_name": "Type",
+ "edit_mode": "inline",
+ "name": "type",
+ "options": [
+ "str",
+ "int",
+ "float",
+ "bool",
+ "dict"
+ ],
+ "type": "str"
+ },
+ {
+ "default": "False",
+ "description": "Set to True if this output field should be a list of the specified type.",
+ "display_name": "As List",
+ "edit_mode": "inline",
+ "name": "multiple",
+ "type": "boolean"
+ }
+ ],
+ "title_case": false,
+ "tool_mode": false,
+ "trace_as_metadata": true,
+ "trigger_icon": "Table",
+ "trigger_text": "Open table",
+ "type": "table",
+ "value": []
+ },
  "seed": {
  "_input_type": "IntInput",
  "advanced": true,