langflow-base-nightly 0.5.0.dev33__py3-none-any.whl → 0.5.0.dev35__py3-none-any.whl

This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that public registry.
Files changed (171)
  1. langflow/alembic/versions/1cb603706752_modify_uniqueness_constraint_on_file_.py +279 -0
  2. langflow/api/v1/endpoints.py +1 -1
  3. langflow/base/composio/composio_base.py +1092 -126
  4. langflow/components/agents/mcp_component.py +21 -4
  5. langflow/components/composio/__init__.py +24 -0
  6. langflow/components/composio/composio_api.py +116 -136
  7. langflow/components/composio/dropbox_compnent.py +11 -0
  8. langflow/components/composio/github_composio.py +1 -639
  9. langflow/components/composio/gmail_composio.py +26 -394
  10. langflow/components/composio/googlecalendar_composio.py +2 -778
  11. langflow/components/composio/googlemeet_composio.py +11 -0
  12. langflow/components/composio/googletasks_composio.py +8 -0
  13. langflow/components/composio/linear_composio.py +11 -0
  14. langflow/components/composio/outlook_composio.py +1 -755
  15. langflow/components/composio/reddit_composio.py +11 -0
  16. langflow/components/composio/slack_composio.py +1 -576
  17. langflow/components/composio/slackbot_composio.py +11 -0
  18. langflow/components/composio/supabase_composio.py +11 -0
  19. langflow/components/composio/todoist_composio.py +11 -0
  20. langflow/components/composio/youtube_composio.py +11 -0
  21. langflow/components/data/kb_ingest.py +15 -16
  22. langflow/components/processing/save_file.py +31 -4
  23. langflow/custom/utils.py +30 -7
  24. langflow/frontend/assets/{SlackIcon-Bikuxo8x.js → SlackIcon-B260Qg_R.js} +1 -1
  25. langflow/frontend/assets/{Wikipedia-B6aCFf5-.js → Wikipedia-BB2mbgyd.js} +1 -1
  26. langflow/frontend/assets/{Wolfram-CekL_M-a.js → Wolfram-DytXC9hF.js} +1 -1
  27. langflow/frontend/assets/{index-D1RgjMON.js → index-3TJWUdmx.js} +1 -1
  28. langflow/frontend/assets/{index-B4xLpgbM.js → index-3qMh9x6K.js} +1 -1
  29. langflow/frontend/assets/{index-DEuXrfXH.js → index-3uOAA_XX.js} +1 -1
  30. langflow/frontend/assets/{index-DTJX3yQa.js → index-4eRtaV45.js} +1 -1
  31. langflow/frontend/assets/index-7xXgqu09.js +1 -0
  32. langflow/frontend/assets/{index-BRNhftot.js → index-AY5Dm2mG.js} +1 -1
  33. langflow/frontend/assets/{index-4Tl3Nxdo.js → index-AlJ7td-D.js} +1 -1
  34. langflow/frontend/assets/{index-D2nHdRne.js → index-B-c82Fnu.js} +1 -1
  35. langflow/frontend/assets/{index-C3RZz8WE.js → index-B2ggrBuR.js} +1 -1
  36. langflow/frontend/assets/{index-in188l0A.js → index-B536IPXH.js} +1 -1
  37. langflow/frontend/assets/{index-CP0tFKwN.js → index-B5ed-sAv.js} +1 -1
  38. langflow/frontend/assets/{index-CAzSTGAM.js → index-B8TlNgn-.js} +1 -1
  39. langflow/frontend/assets/{index-09CVJwsY.js → index-B8y58M9b.js} +1 -1
  40. langflow/frontend/assets/{index-B9uOBe6Y.js → index-B9Mo3ndZ.js} +1 -1
  41. langflow/frontend/assets/{index-DAJafn16.js → index-BCK-ZyIh.js} +1 -1
  42. langflow/frontend/assets/{index-Cy-ZEfWh.js → index-BEDxAk3N.js} +1 -1
  43. langflow/frontend/assets/{index-DbmqjLy6.js → index-BEKoRwsX.js} +1 -1
  44. langflow/frontend/assets/{index-BcqeL_f4.js → index-BIkqesA-.js} +1 -1
  45. langflow/frontend/assets/{index-7x3wNZ-4.js → index-BJrY2Fiu.js} +1 -1
  46. langflow/frontend/assets/{index-Iamzh9ZT.js → index-BKvKC-12.js} +1 -1
  47. langflow/frontend/assets/{index-COqjpsdy.js → index-BLROcaSz.js} +1 -1
  48. langflow/frontend/assets/{index-BRwkzs92.js → index-BNbWMmAV.js} +1 -1
  49. langflow/frontend/assets/{index-C_UkF-RJ.js → index-BOEf7-ty.js} +1 -1
  50. langflow/frontend/assets/index-BOYTBrh9.js +1 -0
  51. langflow/frontend/assets/{index-DDcpxWU4.js → index-BQB-iDYl.js} +1 -1
  52. langflow/frontend/assets/{index-Crq_yhkG.js → index-BRWNIt9F.js} +1 -1
  53. langflow/frontend/assets/{index-DmaQAn3K.js → index-BVHvIhT5.js} +1 -1
  54. langflow/frontend/assets/{index-Cs_jt3dj.js → index-BVtf6m9S.js} +1 -1
  55. langflow/frontend/assets/{index-T2jJOG85.js → index-BWq9GTzt.js} +1 -1
  56. langflow/frontend/assets/{index-Dz0r9Idb.js → index-BXMhmvTj.js} +1 -1
  57. langflow/frontend/assets/{index-eJwu5YEi.js → index-Ba3RTMXI.js} +1 -1
  58. langflow/frontend/assets/{index-xVx59Op-.js → index-Baka5dKE.js} +1 -1
  59. langflow/frontend/assets/{index-DnusMCK1.js → index-BbsND1Qg.js} +1 -1
  60. langflow/frontend/assets/index-BcgB3rXH.js +1 -0
  61. langflow/frontend/assets/{index-CmiRgF_-.js → index-BdIWbCEL.js} +1 -1
  62. langflow/frontend/assets/{index-BllNr21U.js → index-BdYgKk1d.js} +1 -1
  63. langflow/frontend/assets/{index-BIKbxmIh.js → index-BeNby7qF.js} +1 -1
  64. langflow/frontend/assets/{index-CUe1ivTn.js → index-BejHxU5W.js} +1 -1
  65. langflow/frontend/assets/{index-CVphnxXi.js → index-Bisa4IQF.js} +1 -1
  66. langflow/frontend/assets/{index-Cr2oy5K2.js → index-BjENqyKe.js} +1 -1
  67. langflow/frontend/assets/{index-CEn_71Wk.js → index-BlBl2tvQ.js} +1 -1
  68. langflow/frontend/assets/{index-DOb9c2bf.js → index-BnLT29qW.js} +1 -1
  69. langflow/frontend/assets/{index-BRizlHaN.js → index-BqUeOc7Y.js} +1 -1
  70. langflow/frontend/assets/{index-D7nFs6oq.js → index-BsBWP-Dh.js} +1 -1
  71. langflow/frontend/assets/{index-BlRTHXW5.js → index-BtJ2o21k.js} +1 -1
  72. langflow/frontend/assets/{index-AOX7bbjJ.js → index-BxWXWRmZ.js} +1 -1
  73. langflow/frontend/assets/{index-B20KmxhS.js → index-BxkZkBgQ.js} +1 -1
  74. langflow/frontend/assets/{index-DoFlaGDx.js → index-Bxml6wXu.js} +1 -1
  75. langflow/frontend/assets/{index-B9KRIJFi.js → index-ByFXr9Iq.js} +1 -1
  76. langflow/frontend/assets/{index-CY6LUi4V.js → index-C2Xd7UkR.js} +1 -1
  77. langflow/frontend/assets/index-C76aBV_h.js +1 -0
  78. langflow/frontend/assets/{index-9gkURvG2.js → index-C7V5U9yH.js} +1 -1
  79. langflow/frontend/assets/{index-BDmbsLY2.js → index-C7x9R_Yo.js} +1 -1
  80. langflow/frontend/assets/{index-DI0zAExi.js → index-C8KD3LPb.js} +1 -1
  81. langflow/frontend/assets/{index-DzDNhMMW.js → index-C9N80hP8.js} +1 -1
  82. langflow/frontend/assets/{index-6GWpsedd.js → index-CDFLVFB4.js} +1 -1
  83. langflow/frontend/assets/{index-pkOi9P45.js → index-CF4dtI6S.js} +1 -1
  84. langflow/frontend/assets/{index-CdwjD4IX.js → index-CG7cp0nD.js} +1 -1
  85. langflow/frontend/assets/{index-J0pvFqLk.js → index-CHFO5O4g.js} +1 -1
  86. langflow/frontend/assets/{index-5G402gB8.js → index-CJwYfDBz.js} +1 -1
  87. langflow/frontend/assets/{index-BzCjyHto.js → index-CMGZGIx_.js} +1 -1
  88. langflow/frontend/assets/{index-Bm7a2vMS.js → index-COL0eiWI.js} +1 -1
  89. langflow/frontend/assets/{index-JHCxbvlW.js → index-CWWo2zOA.js} +1 -1
  90. langflow/frontend/assets/{index-C7wDSVVH.js → index-C_1RBTul.js} +1 -1
  91. langflow/frontend/assets/{index-BIjUtp6d.js → index-Ccb5B8zG.js} +1 -1
  92. langflow/frontend/assets/{index-yIh6-LZT.js → index-Cd5zuUUK.js} +1 -1
  93. langflow/frontend/assets/{index-CPIdMJkX.js → index-CkQ-bJ4G.js} +1 -1
  94. langflow/frontend/assets/{index-TRyDa01A.js → index-CkSzjCqM.js} +1 -1
  95. langflow/frontend/assets/{index-CSRizl2S.js → index-CoUlHbtg.js} +1 -1
  96. langflow/frontend/assets/index-Cpgkb0Q3.js +1 -0
  97. langflow/frontend/assets/{index-Cp7Pmn03.js → index-CqDUqHfd.js} +1 -1
  98. langflow/frontend/assets/{index-CGVDXKtN.js → index-Ct9_T9ox.js} +1 -1
  99. langflow/frontend/assets/{index-BwlYjc56.js → index-CvQ0w8Pj.js} +1 -1
  100. langflow/frontend/assets/{index-DkJCCraf.js → index-CwIxqYlT.js} +1 -1
  101. langflow/frontend/assets/{index-Bgd7yLoW.js → index-Cx__T92e.js} +1 -1
  102. langflow/frontend/assets/{index-RveG4dl9.js → index-D-zkHcob.js} +1 -1
  103. langflow/frontend/assets/{index-DVV_etfW.js → index-D0HmkH0H.js} +1 -1
  104. langflow/frontend/assets/{index-CglSqvB5.js → index-D0s9f6Re.js} +1 -1
  105. langflow/frontend/assets/{index-J98sU-1p.js → index-D5PeCofu.js} +1 -1
  106. langflow/frontend/assets/{index-BJIsQS8D.js → index-D87Zw62M.js} +1 -1
  107. langflow/frontend/assets/{index-FYcoJPMP.js → index-D9eflZfP.js} +1 -1
  108. langflow/frontend/assets/{index-DJs6FoYC.js → index-DDNNv4C0.js} +1 -1
  109. langflow/frontend/assets/index-DHlEwAxb.js +1 -0
  110. langflow/frontend/assets/{index-DqDQk0Cu.js → index-DIqSyDVO.js} +1 -1
  111. langflow/frontend/assets/{index-DOI0ceS-.js → index-DK8vNpXK.js} +1 -1
  112. langflow/frontend/assets/{index-D29n5mus.js → index-DKEXZFUO.js} +1 -1
  113. langflow/frontend/assets/{index-dfaj9-hY.js → index-DPX6X_bw.js} +1 -1
  114. langflow/frontend/assets/{index-CgbINWS8.js → index-DS1EgA10.js} +1 -1
  115. langflow/frontend/assets/{index-C69gdJqw.js → index-DS9I4y48.js} +1 -1
  116. langflow/frontend/assets/{index-B2EmwqKj.js → index-DWkMJnbd.js} +1 -1
  117. langflow/frontend/assets/{index-CIYzjH2y.js → index-DWr_zPkx.js} +1 -1
  118. langflow/frontend/assets/{index-D-HTZ68O.js → index-DX7XsAcx.js} +1 -1
  119. langflow/frontend/assets/{index-Cq30cQcP.js → index-DZzbmg3J.js} +1 -1
  120. langflow/frontend/assets/{index-BZCt_UnJ.js → index-DasrI03Y.js} +1 -1
  121. langflow/frontend/assets/index-DdzVmJHE.js +1 -0
  122. langflow/frontend/assets/{index-DmvjdU1N.js → index-DhzEUXfr.js} +1 -1
  123. langflow/frontend/assets/{index-B_ytx_iA.js → index-DpJiH-Rk.js} +1 -1
  124. langflow/frontend/assets/{index-Cyk3aCmP.js → index-DpQKtcXu.js} +1 -1
  125. langflow/frontend/assets/{index-DrvRK4_i.js → index-Dpz3oBf5.js} +1 -1
  126. langflow/frontend/assets/{index-DF0oWRdd.js → index-DqSH4x-R.js} +1 -1
  127. langflow/frontend/assets/{index-DX_InNVT.js → index-DtJyCbzF.js} +1 -1
  128. langflow/frontend/assets/{index-B4AtFbkN.js → index-Du9aJK7m.js} +1 -1
  129. langflow/frontend/assets/{index-qXcoVIRo.js → index-DuAeoC-H.js} +1 -1
  130. langflow/frontend/assets/{index-D7Vx6mgS.js → index-DxIs8VSp.js} +1 -1
  131. langflow/frontend/assets/{index-U7J1YiWE.js → index-DyJDHm2D.js} +1 -1
  132. langflow/frontend/assets/{index-1MEYR1La.js → index-DzeIsaBm.js} +1 -1
  133. langflow/frontend/assets/{index-Cbwk3f-p.js → index-DztLFiip.js} +1 -1
  134. langflow/frontend/assets/{index-C_2G2ZqJ.js → index-GODbXlHC.js} +1 -1
  135. langflow/frontend/assets/{index-2vQdFIK_.js → index-G_U_kPAd.js} +1 -1
  136. langflow/frontend/assets/{index-DS4F_Phe.js → index-IFGgPiye.js} +1 -1
  137. langflow/frontend/assets/{index-5hW8VleF.js → index-LrMzDsq9.js} +1 -1
  138. langflow/frontend/assets/{index-L7FKc9QN.js → index-R7q8cAek.js} +1 -1
  139. langflow/frontend/assets/{index-BRE8A4Q_.js → index-Uq2ij_SS.js} +1 -1
  140. langflow/frontend/assets/{index-Bn4HAVDG.js → index-VHmUHUUU.js} +1 -1
  141. langflow/frontend/assets/{index-VO-pk-Hg.js → index-VZnN0P6C.js} +1 -1
  142. langflow/frontend/assets/{index-Dy7ehgeV.js → index-VcXZzovW.js} +1 -1
  143. langflow/frontend/assets/{index-DNS4La1f.js → index-Ym6gz0T6.js} +1 -1
  144. langflow/frontend/assets/{index-UI2ws3qp.js → index-ci4XHjbJ.js} +176 -176
  145. langflow/frontend/assets/{index-DlMAYATX.js → index-dkS0ek2S.js} +1 -1
  146. langflow/frontend/assets/{index-Dc0p1Oxl.js → index-hOkEW3JP.js} +1 -1
  147. langflow/frontend/assets/{index-KnS52ylc.js → index-js8ceOaP.js} +1 -1
  148. langflow/frontend/assets/{index-DtCsjX48.js → index-lKEJpUsF.js} +1 -1
  149. langflow/frontend/assets/{index-BO4fl1uU.js → index-mBjJYD9q.js} +1 -1
  150. langflow/frontend/assets/{index-C_K6Tof7.js → index-r1LZg-PY.js} +1 -1
  151. langflow/frontend/assets/index-rcdQpNcU.js +1 -0
  152. langflow/frontend/assets/{index-_3qag0I4.js → index-sS6XLk3j.js} +1 -1
  153. langflow/frontend/assets/{index-C6P0vvSP.js → index-tOy_uloT.js} +1 -1
  154. langflow/frontend/assets/lazyIconImports-Bh1TFfvH.js +2 -0
  155. langflow/frontend/assets/{use-post-add-user-Bt6vZvvT.js → use-post-add-user-HN0rRnhv.js} +1 -1
  156. langflow/frontend/index.html +1 -1
  157. langflow/initial_setup/starter_projects/Knowledge Ingestion.json +2 -2
  158. langflow/initial_setup/starter_projects/News Aggregator.json +19 -2
  159. langflow/initial_setup/starter_projects/Nvidia Remix.json +19 -2
  160. langflow/interface/initialize/loading.py +3 -1
  161. langflow/main.py +19 -2
  162. langflow/services/database/models/file/model.py +4 -2
  163. langflow/services/database/service.py +3 -1
  164. langflow/services/telemetry/schema.py +7 -0
  165. langflow/services/telemetry/service.py +25 -0
  166. langflow/services/tracing/service.py +14 -4
  167. {langflow_base_nightly-0.5.0.dev33.dist-info → langflow_base_nightly-0.5.0.dev35.dist-info}/METADATA +1 -1
  168. {langflow_base_nightly-0.5.0.dev33.dist-info → langflow_base_nightly-0.5.0.dev35.dist-info}/RECORD +170 -152
  169. langflow/frontend/assets/lazyIconImports-kvf_Kak2.js +0 -2
  170. {langflow_base_nightly-0.5.0.dev33.dist-info → langflow_base_nightly-0.5.0.dev35.dist-info}/WHEEL +0 -0
  171. {langflow_base_nightly-0.5.0.dev33.dist-info → langflow_base_nightly-0.5.0.dev35.dist-info}/entry_points.txt +0 -0
@@ -2518,7 +2518,7 @@
  "legacy": false,
  "lf_version": "1.4.2",
  "metadata": {
- "code_hash": "d58eb6d2b3e7",
+ "code_hash": "6839fa3cae99",
  "module": "langflow.components.agents.mcp_component.MCPToolsComponent"
  },
  "minimized": false,
@@ -2545,6 +2545,23 @@
  "score": 0.003932426697386162,
  "template": {
  "_type": "Component",
+ "api_key": {
+ "_input_type": "SecretStrInput",
+ "advanced": true,
+ "display_name": "Langflow API Key",
+ "dynamic": false,
+ "info": "Langflow API key for authentication when fetching MCP servers and tools.",
+ "input_types": [],
+ "load_from_db": true,
+ "name": "api_key",
+ "password": true,
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "type": "str",
+ "value": ""
+ },
  "code": {
  "advanced": true,
  "dynamic": true,
@@ -2561,7 +2578,7 @@
  "show": true,
  "title_case": false,
  "type": "code",
- "value": "from __future__ import annotations\n\nimport asyncio\nimport uuid\nfrom typing import Any\n\nfrom langchain_core.tools import StructuredTool # noqa: TC002\n\nfrom langflow.api.v2.mcp import get_server\nfrom langflow.base.agents.utils import maybe_unflatten_dict, safe_cache_get, safe_cache_set\nfrom langflow.base.mcp.util import (\n MCPSseClient,\n MCPStdioClient,\n create_input_schema_from_json_schema,\n update_tools,\n)\nfrom langflow.custom.custom_component.component_with_cache import ComponentWithCache\nfrom langflow.inputs.inputs import InputTypes # noqa: TC001\nfrom langflow.io import DropdownInput, McpInput, MessageTextInput, Output\nfrom langflow.io.schema import flatten_schema, schema_to_langflow_inputs\nfrom langflow.logging import logger\nfrom langflow.schema.dataframe import DataFrame\nfrom langflow.schema.message import Message\nfrom langflow.services.auth.utils import create_user_longterm_token\n\n# Import get_server from the backend API\nfrom langflow.services.database.models.user.crud import get_user_by_id\nfrom langflow.services.deps import get_session, get_settings_service, get_storage_service\n\n\nclass MCPToolsComponent(ComponentWithCache):\n schema_inputs: list = []\n tools: list[StructuredTool] = []\n _not_load_actions: bool = False\n _tool_cache: dict = {}\n _last_selected_server: str | None = None # Cache for the last selected server\n\n def __init__(self, **data) -> None:\n super().__init__(**data)\n # Initialize cache keys to avoid CacheMiss when accessing them\n self._ensure_cache_structure()\n\n # Initialize clients with access to the component cache\n self.stdio_client: MCPStdioClient = MCPStdioClient(component_cache=self._shared_component_cache)\n self.sse_client: MCPSseClient = MCPSseClient(component_cache=self._shared_component_cache)\n\n def _ensure_cache_structure(self):\n \"\"\"Ensure the cache has the required structure.\"\"\"\n # Check if servers key exists and is not CacheMiss\n servers_value = safe_cache_get(self._shared_component_cache, \"servers\")\n if servers_value is None:\n safe_cache_set(self._shared_component_cache, \"servers\", {})\n\n # Check if last_selected_server key exists and is not CacheMiss\n last_server_value = safe_cache_get(self._shared_component_cache, \"last_selected_server\")\n if last_server_value is None:\n safe_cache_set(self._shared_component_cache, \"last_selected_server\", \"\")\n\n default_keys: list[str] = [\n \"code\",\n \"_type\",\n \"tool_mode\",\n \"tool_placeholder\",\n \"mcp_server\",\n \"tool\",\n ]\n\n display_name = \"MCP Tools\"\n description = \"Connect to an MCP server to use its tools.\"\n documentation: str = \"https://docs.langflow.org/mcp-client\"\n icon = \"Mcp\"\n name = \"MCPTools\"\n\n inputs = [\n McpInput(\n name=\"mcp_server\",\n display_name=\"MCP Server\",\n info=\"Select the MCP Server that will be used by this component\",\n real_time_refresh=True,\n ),\n DropdownInput(\n name=\"tool\",\n display_name=\"Tool\",\n options=[],\n value=\"\",\n info=\"Select the tool to execute\",\n show=False,\n required=True,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"tool_placeholder\",\n display_name=\"Tool Placeholder\",\n info=\"Placeholder for the tool\",\n value=\"\",\n show=False,\n tool_mode=False,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Response\", name=\"response\", method=\"build_output\"),\n ]\n\n async def _validate_schema_inputs(self, tool_obj) -> list[InputTypes]:\n \"\"\"Validate and process schema inputs for a tool.\"\"\"\n try:\n if not tool_obj or not 
hasattr(tool_obj, \"args_schema\"):\n msg = \"Invalid tool object or missing input schema\"\n raise ValueError(msg)\n\n flat_schema = flatten_schema(tool_obj.args_schema.schema())\n input_schema = create_input_schema_from_json_schema(flat_schema)\n if not input_schema:\n msg = f\"Empty input schema for tool '{tool_obj.name}'\"\n raise ValueError(msg)\n\n schema_inputs = schema_to_langflow_inputs(input_schema)\n if not schema_inputs:\n msg = f\"No input parameters defined for tool '{tool_obj.name}'\"\n logger.warning(msg)\n return []\n\n except Exception as e:\n msg = f\"Error validating schema inputs: {e!s}\"\n logger.exception(msg)\n raise ValueError(msg) from e\n else:\n return schema_inputs\n\n async def update_tool_list(self, mcp_server_value=None):\n # Accepts mcp_server_value as dict {name, config} or uses self.mcp_server\n mcp_server = mcp_server_value if mcp_server_value is not None else getattr(self, \"mcp_server\", None)\n server_name = None\n server_config_from_value = None\n if isinstance(mcp_server, dict):\n server_name = mcp_server.get(\"name\")\n server_config_from_value = mcp_server.get(\"config\")\n else:\n server_name = mcp_server\n if not server_name:\n self.tools = []\n return [], {\"name\": server_name, \"config\": server_config_from_value}\n\n # Use shared cache if available\n servers_cache = safe_cache_get(self._shared_component_cache, \"servers\", {})\n cached = servers_cache.get(server_name) if isinstance(servers_cache, dict) else None\n\n if cached is not None:\n self.tools = cached[\"tools\"]\n self.tool_names = cached[\"tool_names\"]\n self._tool_cache = cached[\"tool_cache\"]\n server_config_from_value = cached[\"config\"]\n return self.tools, {\"name\": server_name, \"config\": server_config_from_value}\n\n try:\n async for db in get_session():\n user_id, _ = await create_user_longterm_token(db)\n current_user = await get_user_by_id(db, user_id)\n\n # Try to get server config from DB/API\n server_config = await get_server(\n server_name,\n current_user,\n db,\n storage_service=get_storage_service(),\n settings_service=get_settings_service(),\n )\n\n # If get_server returns empty but we have a config, use it\n if not server_config and server_config_from_value:\n server_config = server_config_from_value\n\n if not server_config:\n self.tools = []\n return [], {\"name\": server_name, \"config\": server_config}\n\n _, tool_list, tool_cache = await update_tools(\n server_name=server_name,\n server_config=server_config,\n mcp_stdio_client=self.stdio_client,\n mcp_sse_client=self.sse_client,\n )\n\n self.tool_names = [tool.name for tool in tool_list if hasattr(tool, \"name\")]\n self._tool_cache = tool_cache\n self.tools = tool_list\n # Cache the result using shared cache\n cache_data = {\n \"tools\": tool_list,\n \"tool_names\": self.tool_names,\n \"tool_cache\": tool_cache,\n \"config\": server_config,\n }\n\n # Safely update the servers cache\n current_servers_cache = safe_cache_get(self._shared_component_cache, \"servers\", {})\n if isinstance(current_servers_cache, dict):\n current_servers_cache[server_name] = cache_data\n safe_cache_set(self._shared_component_cache, \"servers\", current_servers_cache)\n\n return tool_list, {\"name\": server_name, \"config\": server_config}\n except (TimeoutError, asyncio.TimeoutError) as e:\n msg = f\"Timeout updating tool list: {e!s}\"\n logger.exception(msg)\n raise TimeoutError(msg) from e\n except Exception as e:\n msg = f\"Error updating tool list: {e!s}\"\n logger.exception(msg)\n raise ValueError(msg) from e\n\n async 
def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None) -> dict:\n \"\"\"Toggle the visibility of connection-specific fields based on the selected mode.\"\"\"\n try:\n if field_name == \"tool\":\n try:\n if len(self.tools) == 0:\n try:\n self.tools, build_config[\"mcp_server\"][\"value\"] = await self.update_tool_list()\n build_config[\"tool\"][\"options\"] = [tool.name for tool in self.tools]\n build_config[\"tool\"][\"placeholder\"] = \"Select a tool\"\n except (TimeoutError, asyncio.TimeoutError) as e:\n msg = f\"Timeout updating tool list: {e!s}\"\n logger.exception(msg)\n if not build_config[\"tools_metadata\"][\"show\"]:\n build_config[\"tool\"][\"show\"] = True\n build_config[\"tool\"][\"options\"] = []\n build_config[\"tool\"][\"value\"] = \"\"\n build_config[\"tool\"][\"placeholder\"] = \"Timeout on MCP server\"\n else:\n build_config[\"tool\"][\"show\"] = False\n except ValueError:\n if not build_config[\"tools_metadata\"][\"show\"]:\n build_config[\"tool\"][\"show\"] = True\n build_config[\"tool\"][\"options\"] = []\n build_config[\"tool\"][\"value\"] = \"\"\n build_config[\"tool\"][\"placeholder\"] = \"Error on MCP Server\"\n else:\n build_config[\"tool\"][\"show\"] = False\n\n if field_value == \"\":\n return build_config\n tool_obj = None\n for tool in self.tools:\n if tool.name == field_value:\n tool_obj = tool\n break\n if tool_obj is None:\n msg = f\"Tool {field_value} not found in available tools: {self.tools}\"\n logger.warning(msg)\n return build_config\n await self._update_tool_config(build_config, field_value)\n except Exception as e:\n build_config[\"tool\"][\"options\"] = []\n msg = f\"Failed to update tools: {e!s}\"\n raise ValueError(msg) from e\n else:\n return build_config\n elif field_name == \"mcp_server\":\n if not field_value:\n build_config[\"tool\"][\"show\"] = False\n build_config[\"tool\"][\"options\"] = []\n build_config[\"tool\"][\"value\"] = \"\"\n build_config[\"tool\"][\"placeholder\"] = \"\"\n build_config[\"tool_placeholder\"][\"tool_mode\"] = False\n self.remove_non_default_keys(build_config)\n return build_config\n\n build_config[\"tool_placeholder\"][\"tool_mode\"] = True\n\n current_server_name = field_value.get(\"name\") if isinstance(field_value, dict) else field_value\n _last_selected_server = safe_cache_get(self._shared_component_cache, \"last_selected_server\", \"\")\n\n # To avoid unnecessary updates, only proceed if the server has actually changed\n if (_last_selected_server in (current_server_name, \"\")) and build_config[\"tool\"][\"show\"]:\n return build_config\n\n # Determine if \"Tool Mode\" is active by checking if the tool dropdown is hidden.\n is_in_tool_mode = build_config[\"tools_metadata\"][\"show\"]\n safe_cache_set(self._shared_component_cache, \"last_selected_server\", current_server_name)\n\n # Check if tools are already cached for this server before clearing\n cached_tools = None\n if current_server_name:\n servers_cache = safe_cache_get(self._shared_component_cache, \"servers\", {})\n if isinstance(servers_cache, dict):\n cached = servers_cache.get(current_server_name)\n if cached is not None:\n cached_tools = cached[\"tools\"]\n self.tools = cached_tools\n self.tool_names = cached[\"tool_names\"]\n self._tool_cache = cached[\"tool_cache\"]\n\n # Only clear tools if we don't have cached tools for the current server\n if not cached_tools:\n self.tools = [] # Clear previous tools only if no cache\n\n self.remove_non_default_keys(build_config) # Clear previous tool inputs\n\n # 
Only show the tool dropdown if not in tool_mode\n if not is_in_tool_mode:\n build_config[\"tool\"][\"show\"] = True\n if cached_tools:\n # Use cached tools to populate options immediately\n build_config[\"tool\"][\"options\"] = [tool.name for tool in cached_tools]\n build_config[\"tool\"][\"placeholder\"] = \"Select a tool\"\n else:\n # Show loading state only when we need to fetch tools\n build_config[\"tool\"][\"placeholder\"] = \"Loading tools...\"\n build_config[\"tool\"][\"options\"] = []\n build_config[\"tool\"][\"value\"] = uuid.uuid4()\n else:\n # Keep the tool dropdown hidden if in tool_mode\n self._not_load_actions = True\n build_config[\"tool\"][\"show\"] = False\n\n elif field_name == \"tool_mode\":\n build_config[\"tool\"][\"placeholder\"] = \"\"\n build_config[\"tool\"][\"show\"] = not bool(field_value) and bool(build_config[\"mcp_server\"])\n self.remove_non_default_keys(build_config)\n self.tool = build_config[\"tool\"][\"value\"]\n if field_value:\n self._not_load_actions = True\n else:\n build_config[\"tool\"][\"value\"] = uuid.uuid4()\n build_config[\"tool\"][\"options\"] = []\n build_config[\"tool\"][\"show\"] = True\n build_config[\"tool\"][\"placeholder\"] = \"Loading tools...\"\n elif field_name == \"tools_metadata\":\n self._not_load_actions = False\n\n except Exception as e:\n msg = f\"Error in update_build_config: {e!s}\"\n logger.exception(msg)\n raise ValueError(msg) from e\n else:\n return build_config\n\n def get_inputs_for_all_tools(self, tools: list) -> dict:\n \"\"\"Get input schemas for all tools.\"\"\"\n inputs = {}\n for tool in tools:\n if not tool or not hasattr(tool, \"name\"):\n continue\n try:\n flat_schema = flatten_schema(tool.args_schema.schema())\n input_schema = create_input_schema_from_json_schema(flat_schema)\n langflow_inputs = schema_to_langflow_inputs(input_schema)\n inputs[tool.name] = langflow_inputs\n except (AttributeError, ValueError, TypeError, KeyError) as e:\n msg = f\"Error getting inputs for tool {getattr(tool, 'name', 'unknown')}: {e!s}\"\n logger.exception(msg)\n continue\n return inputs\n\n def remove_input_schema_from_build_config(\n self, build_config: dict, tool_name: str, input_schema: dict[list[InputTypes], Any]\n ):\n \"\"\"Remove the input schema for the tool from the build config.\"\"\"\n # Keep only schemas that don't belong to the current tool\n input_schema = {k: v for k, v in input_schema.items() if k != tool_name}\n # Remove all inputs from other tools\n for value in input_schema.values():\n for _input in value:\n if _input.name in build_config:\n build_config.pop(_input.name)\n\n def remove_non_default_keys(self, build_config: dict) -> None:\n \"\"\"Remove non-default keys from the build config.\"\"\"\n for key in list(build_config.keys()):\n if key not in self.default_keys:\n build_config.pop(key)\n\n async def _update_tool_config(self, build_config: dict, tool_name: str) -> None:\n \"\"\"Update tool configuration with proper error handling.\"\"\"\n if not self.tools:\n self.tools, build_config[\"mcp_server\"][\"value\"] = await self.update_tool_list()\n\n if not tool_name:\n return\n\n tool_obj = next((tool for tool in self.tools if tool.name == tool_name), None)\n if not tool_obj:\n msg = f\"Tool {tool_name} not found in available tools: {self.tools}\"\n self.remove_non_default_keys(build_config)\n build_config[\"tool\"][\"value\"] = \"\"\n logger.warning(msg)\n return\n\n try:\n # Store current values before removing inputs\n current_values = {}\n for key, value in build_config.items():\n if key not in 
self.default_keys and isinstance(value, dict) and \"value\" in value:\n current_values[key] = value[\"value\"]\n\n # Get all tool inputs and remove old ones\n input_schema_for_all_tools = self.get_inputs_for_all_tools(self.tools)\n self.remove_input_schema_from_build_config(build_config, tool_name, input_schema_for_all_tools)\n\n # Get and validate new inputs\n self.schema_inputs = await self._validate_schema_inputs(tool_obj)\n if not self.schema_inputs:\n msg = f\"No input parameters to configure for tool '{tool_name}'\"\n logger.info(msg)\n return\n\n # Add new inputs to build config\n for schema_input in self.schema_inputs:\n if not schema_input or not hasattr(schema_input, \"name\"):\n msg = \"Invalid schema input detected, skipping\"\n logger.warning(msg)\n continue\n\n try:\n name = schema_input.name\n input_dict = schema_input.to_dict()\n input_dict.setdefault(\"value\", None)\n input_dict.setdefault(\"required\", True)\n\n build_config[name] = input_dict\n\n # Preserve existing value if the parameter name exists in current_values\n if name in current_values:\n build_config[name][\"value\"] = current_values[name]\n\n except (AttributeError, KeyError, TypeError) as e:\n msg = f\"Error processing schema input {schema_input}: {e!s}\"\n logger.exception(msg)\n continue\n except ValueError as e:\n msg = f\"Schema validation error for tool {tool_name}: {e!s}\"\n logger.exception(msg)\n self.schema_inputs = []\n return\n except (AttributeError, KeyError, TypeError) as e:\n msg = f\"Error updating tool config: {e!s}\"\n logger.exception(msg)\n raise ValueError(msg) from e\n\n async def build_output(self) -> DataFrame:\n \"\"\"Build output with improved error handling and validation.\"\"\"\n try:\n self.tools, _ = await self.update_tool_list()\n if self.tool != \"\":\n # Set session context for persistent MCP sessions using Langflow session ID\n session_context = self._get_session_context()\n if session_context:\n self.stdio_client.set_session_context(session_context)\n self.sse_client.set_session_context(session_context)\n\n exec_tool = self._tool_cache[self.tool]\n tool_args = self.get_inputs_for_all_tools(self.tools)[self.tool]\n kwargs = {}\n for arg in tool_args:\n value = getattr(self, arg.name, None)\n if value is not None:\n if isinstance(value, Message):\n kwargs[arg.name] = value.text\n else:\n kwargs[arg.name] = value\n\n unflattened_kwargs = maybe_unflatten_dict(kwargs)\n\n output = await exec_tool.coroutine(**unflattened_kwargs)\n\n tool_content = []\n for item in output.content:\n item_dict = item.model_dump()\n tool_content.append(item_dict)\n return DataFrame(data=tool_content)\n return DataFrame(data=[{\"error\": \"You must select a tool\"}])\n except Exception as e:\n msg = f\"Error in build_output: {e!s}\"\n logger.exception(msg)\n raise ValueError(msg) from e\n\n def _get_session_context(self) -> str | None:\n \"\"\"Get the Langflow session ID for MCP session caching.\"\"\"\n # Try to get session ID from the component's execution context\n if hasattr(self, \"graph\") and hasattr(self.graph, \"session_id\"):\n session_id = self.graph.session_id\n # Include server name to ensure different servers get different sessions\n server_name = \"\"\n mcp_server = getattr(self, \"mcp_server\", None)\n if isinstance(mcp_server, dict):\n server_name = mcp_server.get(\"name\", \"\")\n elif mcp_server:\n server_name = str(mcp_server)\n return f\"{session_id}_{server_name}\" if session_id else None\n return None\n\n async def _get_tools(self):\n \"\"\"Get cached tools or update if 
necessary.\"\"\"\n mcp_server = getattr(self, \"mcp_server\", None)\n if not self._not_load_actions:\n tools, _ = await self.update_tool_list(mcp_server)\n return tools\n return []\n"
+ "value": "from __future__ import annotations\n\nimport asyncio\nimport uuid\nfrom typing import Any\n\nfrom langchain_core.tools import StructuredTool # noqa: TC002\n\nfrom langflow.api.v2.mcp import get_server\nfrom langflow.base.agents.utils import maybe_unflatten_dict, safe_cache_get, safe_cache_set\nfrom langflow.base.mcp.util import (\n MCPSseClient,\n MCPStdioClient,\n create_input_schema_from_json_schema,\n update_tools,\n)\nfrom langflow.custom.custom_component.component_with_cache import ComponentWithCache\nfrom langflow.inputs.inputs import InputTypes # noqa: TC001\nfrom langflow.io import DropdownInput, McpInput, MessageTextInput, Output, SecretStrInput\nfrom langflow.io.schema import flatten_schema, schema_to_langflow_inputs\nfrom langflow.logging import logger\nfrom langflow.schema.dataframe import DataFrame\nfrom langflow.schema.message import Message\n\n# Import get_server from the backend API\nfrom langflow.services.auth.utils import create_user_longterm_token, get_current_user\nfrom langflow.services.database.models.user.crud import get_user_by_id\nfrom langflow.services.deps import get_session, get_settings_service, get_storage_service\n\n\nclass MCPToolsComponent(ComponentWithCache):\n schema_inputs: list = []\n tools: list[StructuredTool] = []\n _not_load_actions: bool = False\n _tool_cache: dict = {}\n _last_selected_server: str | None = None # Cache for the last selected server\n\n def __init__(self, **data) -> None:\n super().__init__(**data)\n # Initialize cache keys to avoid CacheMiss when accessing them\n self._ensure_cache_structure()\n\n # Initialize clients with access to the component cache\n self.stdio_client: MCPStdioClient = MCPStdioClient(component_cache=self._shared_component_cache)\n self.sse_client: MCPSseClient = MCPSseClient(component_cache=self._shared_component_cache)\n\n def _ensure_cache_structure(self):\n \"\"\"Ensure the cache has the required structure.\"\"\"\n # Check if servers key exists and is not CacheMiss\n servers_value = safe_cache_get(self._shared_component_cache, \"servers\")\n if servers_value is None:\n safe_cache_set(self._shared_component_cache, \"servers\", {})\n\n # Check if last_selected_server key exists and is not CacheMiss\n last_server_value = safe_cache_get(self._shared_component_cache, \"last_selected_server\")\n if last_server_value is None:\n safe_cache_set(self._shared_component_cache, \"last_selected_server\", \"\")\n\n default_keys: list[str] = [\n \"code\",\n \"_type\",\n \"tool_mode\",\n \"tool_placeholder\",\n \"mcp_server\",\n \"tool\",\n ]\n\n display_name = \"MCP Tools\"\n description = \"Connect to an MCP server to use its tools.\"\n documentation: str = \"https://docs.langflow.org/mcp-client\"\n icon = \"Mcp\"\n name = \"MCPTools\"\n\n inputs = [\n McpInput(\n name=\"mcp_server\",\n display_name=\"MCP Server\",\n info=\"Select the MCP Server that will be used by this component\",\n real_time_refresh=True,\n ),\n DropdownInput(\n name=\"tool\",\n display_name=\"Tool\",\n options=[],\n value=\"\",\n info=\"Select the tool to execute\",\n show=False,\n required=True,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"tool_placeholder\",\n display_name=\"Tool Placeholder\",\n info=\"Placeholder for the tool\",\n value=\"\",\n show=False,\n tool_mode=False,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Langflow API Key\",\n info=\"Langflow API key for authentication when fetching MCP servers and tools.\",\n required=False,\n advanced=True,\n ),\n ]\n\n outputs = [\n 
Output(display_name=\"Response\", name=\"response\", method=\"build_output\"),\n ]\n\n async def _validate_schema_inputs(self, tool_obj) -> list[InputTypes]:\n \"\"\"Validate and process schema inputs for a tool.\"\"\"\n try:\n if not tool_obj or not hasattr(tool_obj, \"args_schema\"):\n msg = \"Invalid tool object or missing input schema\"\n raise ValueError(msg)\n\n flat_schema = flatten_schema(tool_obj.args_schema.schema())\n input_schema = create_input_schema_from_json_schema(flat_schema)\n if not input_schema:\n msg = f\"Empty input schema for tool '{tool_obj.name}'\"\n raise ValueError(msg)\n\n schema_inputs = schema_to_langflow_inputs(input_schema)\n if not schema_inputs:\n msg = f\"No input parameters defined for tool '{tool_obj.name}'\"\n logger.warning(msg)\n return []\n\n except Exception as e:\n msg = f\"Error validating schema inputs: {e!s}\"\n logger.exception(msg)\n raise ValueError(msg) from e\n else:\n return schema_inputs\n\n async def update_tool_list(self, mcp_server_value=None):\n # Accepts mcp_server_value as dict {name, config} or uses self.mcp_server\n mcp_server = mcp_server_value if mcp_server_value is not None else getattr(self, \"mcp_server\", None)\n server_name = None\n server_config_from_value = None\n if isinstance(mcp_server, dict):\n server_name = mcp_server.get(\"name\")\n server_config_from_value = mcp_server.get(\"config\")\n else:\n server_name = mcp_server\n if not server_name:\n self.tools = []\n return [], {\"name\": server_name, \"config\": server_config_from_value}\n\n # Use shared cache if available\n servers_cache = safe_cache_get(self._shared_component_cache, \"servers\", {})\n cached = servers_cache.get(server_name) if isinstance(servers_cache, dict) else None\n\n if cached is not None:\n self.tools = cached[\"tools\"]\n self.tool_names = cached[\"tool_names\"]\n self._tool_cache = cached[\"tool_cache\"]\n server_config_from_value = cached[\"config\"]\n return self.tools, {\"name\": server_name, \"config\": server_config_from_value}\n\n try:\n async for db in get_session():\n # TODO: In 1.6, this may need to be removed or adjusted\n # Try to get the super user token, if possible\n if self.api_key:\n current_user = await get_current_user(\n token=None,\n query_param=self.api_key,\n header_param=None,\n db=db,\n )\n else:\n user_id, _ = await create_user_longterm_token(db)\n current_user = await get_user_by_id(db, user_id)\n\n # Try to get server config from DB/API\n server_config = await get_server(\n server_name,\n current_user,\n db,\n storage_service=get_storage_service(),\n settings_service=get_settings_service(),\n )\n\n # If get_server returns empty but we have a config, use it\n if not server_config and server_config_from_value:\n server_config = server_config_from_value\n\n if not server_config:\n self.tools = []\n return [], {\"name\": server_name, \"config\": server_config}\n\n _, tool_list, tool_cache = await update_tools(\n server_name=server_name,\n server_config=server_config,\n mcp_stdio_client=self.stdio_client,\n mcp_sse_client=self.sse_client,\n )\n\n self.tool_names = [tool.name for tool in tool_list if hasattr(tool, \"name\")]\n self._tool_cache = tool_cache\n self.tools = tool_list\n # Cache the result using shared cache\n cache_data = {\n \"tools\": tool_list,\n \"tool_names\": self.tool_names,\n \"tool_cache\": tool_cache,\n \"config\": server_config,\n }\n\n # Safely update the servers cache\n current_servers_cache = safe_cache_get(self._shared_component_cache, \"servers\", {})\n if isinstance(current_servers_cache, 
dict):\n current_servers_cache[server_name] = cache_data\n safe_cache_set(self._shared_component_cache, \"servers\", current_servers_cache)\n\n return tool_list, {\"name\": server_name, \"config\": server_config}\n except (TimeoutError, asyncio.TimeoutError) as e:\n msg = f\"Timeout updating tool list: {e!s}\"\n logger.exception(msg)\n raise TimeoutError(msg) from e\n except Exception as e:\n msg = f\"Error updating tool list: {e!s}\"\n logger.exception(msg)\n raise ValueError(msg) from e\n\n async def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None) -> dict:\n \"\"\"Toggle the visibility of connection-specific fields based on the selected mode.\"\"\"\n try:\n if field_name == \"tool\":\n try:\n if len(self.tools) == 0:\n try:\n self.tools, build_config[\"mcp_server\"][\"value\"] = await self.update_tool_list()\n build_config[\"tool\"][\"options\"] = [tool.name for tool in self.tools]\n build_config[\"tool\"][\"placeholder\"] = \"Select a tool\"\n except (TimeoutError, asyncio.TimeoutError) as e:\n msg = f\"Timeout updating tool list: {e!s}\"\n logger.exception(msg)\n if not build_config[\"tools_metadata\"][\"show\"]:\n build_config[\"tool\"][\"show\"] = True\n build_config[\"tool\"][\"options\"] = []\n build_config[\"tool\"][\"value\"] = \"\"\n build_config[\"tool\"][\"placeholder\"] = \"Timeout on MCP server\"\n else:\n build_config[\"tool\"][\"show\"] = False\n except ValueError:\n if not build_config[\"tools_metadata\"][\"show\"]:\n build_config[\"tool\"][\"show\"] = True\n build_config[\"tool\"][\"options\"] = []\n build_config[\"tool\"][\"value\"] = \"\"\n build_config[\"tool\"][\"placeholder\"] = \"Error on MCP Server\"\n else:\n build_config[\"tool\"][\"show\"] = False\n\n if field_value == \"\":\n return build_config\n tool_obj = None\n for tool in self.tools:\n if tool.name == field_value:\n tool_obj = tool\n break\n if tool_obj is None:\n msg = f\"Tool {field_value} not found in available tools: {self.tools}\"\n logger.warning(msg)\n return build_config\n await self._update_tool_config(build_config, field_value)\n except Exception as e:\n build_config[\"tool\"][\"options\"] = []\n msg = f\"Failed to update tools: {e!s}\"\n raise ValueError(msg) from e\n else:\n return build_config\n elif field_name == \"mcp_server\":\n if not field_value:\n build_config[\"tool\"][\"show\"] = False\n build_config[\"tool\"][\"options\"] = []\n build_config[\"tool\"][\"value\"] = \"\"\n build_config[\"tool\"][\"placeholder\"] = \"\"\n build_config[\"tool_placeholder\"][\"tool_mode\"] = False\n self.remove_non_default_keys(build_config)\n return build_config\n\n build_config[\"tool_placeholder\"][\"tool_mode\"] = True\n\n current_server_name = field_value.get(\"name\") if isinstance(field_value, dict) else field_value\n _last_selected_server = safe_cache_get(self._shared_component_cache, \"last_selected_server\", \"\")\n\n # To avoid unnecessary updates, only proceed if the server has actually changed\n if (_last_selected_server in (current_server_name, \"\")) and build_config[\"tool\"][\"show\"]:\n return build_config\n\n # Determine if \"Tool Mode\" is active by checking if the tool dropdown is hidden.\n is_in_tool_mode = build_config[\"tools_metadata\"][\"show\"]\n safe_cache_set(self._shared_component_cache, \"last_selected_server\", current_server_name)\n\n # Check if tools are already cached for this server before clearing\n cached_tools = None\n if current_server_name:\n servers_cache = safe_cache_get(self._shared_component_cache, \"servers\", 
{})\n if isinstance(servers_cache, dict):\n cached = servers_cache.get(current_server_name)\n if cached is not None:\n cached_tools = cached[\"tools\"]\n self.tools = cached_tools\n self.tool_names = cached[\"tool_names\"]\n self._tool_cache = cached[\"tool_cache\"]\n\n # Only clear tools if we don't have cached tools for the current server\n if not cached_tools:\n self.tools = [] # Clear previous tools only if no cache\n\n self.remove_non_default_keys(build_config) # Clear previous tool inputs\n\n # Only show the tool dropdown if not in tool_mode\n if not is_in_tool_mode:\n build_config[\"tool\"][\"show\"] = True\n if cached_tools:\n # Use cached tools to populate options immediately\n build_config[\"tool\"][\"options\"] = [tool.name for tool in cached_tools]\n build_config[\"tool\"][\"placeholder\"] = \"Select a tool\"\n else:\n # Show loading state only when we need to fetch tools\n build_config[\"tool\"][\"placeholder\"] = \"Loading tools...\"\n build_config[\"tool\"][\"options\"] = []\n build_config[\"tool\"][\"value\"] = uuid.uuid4()\n else:\n # Keep the tool dropdown hidden if in tool_mode\n self._not_load_actions = True\n build_config[\"tool\"][\"show\"] = False\n\n elif field_name == \"tool_mode\":\n build_config[\"tool\"][\"placeholder\"] = \"\"\n build_config[\"tool\"][\"show\"] = not bool(field_value) and bool(build_config[\"mcp_server\"])\n self.remove_non_default_keys(build_config)\n self.tool = build_config[\"tool\"][\"value\"]\n if field_value:\n self._not_load_actions = True\n else:\n build_config[\"tool\"][\"value\"] = uuid.uuid4()\n build_config[\"tool\"][\"options\"] = []\n build_config[\"tool\"][\"show\"] = True\n build_config[\"tool\"][\"placeholder\"] = \"Loading tools...\"\n elif field_name == \"tools_metadata\":\n self._not_load_actions = False\n\n except Exception as e:\n msg = f\"Error in update_build_config: {e!s}\"\n logger.exception(msg)\n raise ValueError(msg) from e\n else:\n return build_config\n\n def get_inputs_for_all_tools(self, tools: list) -> dict:\n \"\"\"Get input schemas for all tools.\"\"\"\n inputs = {}\n for tool in tools:\n if not tool or not hasattr(tool, \"name\"):\n continue\n try:\n flat_schema = flatten_schema(tool.args_schema.schema())\n input_schema = create_input_schema_from_json_schema(flat_schema)\n langflow_inputs = schema_to_langflow_inputs(input_schema)\n inputs[tool.name] = langflow_inputs\n except (AttributeError, ValueError, TypeError, KeyError) as e:\n msg = f\"Error getting inputs for tool {getattr(tool, 'name', 'unknown')}: {e!s}\"\n logger.exception(msg)\n continue\n return inputs\n\n def remove_input_schema_from_build_config(\n self, build_config: dict, tool_name: str, input_schema: dict[list[InputTypes], Any]\n ):\n \"\"\"Remove the input schema for the tool from the build config.\"\"\"\n # Keep only schemas that don't belong to the current tool\n input_schema = {k: v for k, v in input_schema.items() if k != tool_name}\n # Remove all inputs from other tools\n for value in input_schema.values():\n for _input in value:\n if _input.name in build_config:\n build_config.pop(_input.name)\n\n def remove_non_default_keys(self, build_config: dict) -> None:\n \"\"\"Remove non-default keys from the build config.\"\"\"\n for key in list(build_config.keys()):\n if key not in self.default_keys:\n build_config.pop(key)\n\n async def _update_tool_config(self, build_config: dict, tool_name: str) -> None:\n \"\"\"Update tool configuration with proper error handling.\"\"\"\n if not self.tools:\n self.tools, 
build_config[\"mcp_server\"][\"value\"] = await self.update_tool_list()\n\n if not tool_name:\n return\n\n tool_obj = next((tool for tool in self.tools if tool.name == tool_name), None)\n if not tool_obj:\n msg = f\"Tool {tool_name} not found in available tools: {self.tools}\"\n self.remove_non_default_keys(build_config)\n build_config[\"tool\"][\"value\"] = \"\"\n logger.warning(msg)\n return\n\n try:\n # Store current values before removing inputs\n current_values = {}\n for key, value in build_config.items():\n if key not in self.default_keys and isinstance(value, dict) and \"value\" in value:\n current_values[key] = value[\"value\"]\n\n # Get all tool inputs and remove old ones\n input_schema_for_all_tools = self.get_inputs_for_all_tools(self.tools)\n self.remove_input_schema_from_build_config(build_config, tool_name, input_schema_for_all_tools)\n\n # Get and validate new inputs\n self.schema_inputs = await self._validate_schema_inputs(tool_obj)\n if not self.schema_inputs:\n msg = f\"No input parameters to configure for tool '{tool_name}'\"\n logger.info(msg)\n return\n\n # Add new inputs to build config\n for schema_input in self.schema_inputs:\n if not schema_input or not hasattr(schema_input, \"name\"):\n msg = \"Invalid schema input detected, skipping\"\n logger.warning(msg)\n continue\n\n try:\n name = schema_input.name\n input_dict = schema_input.to_dict()\n input_dict.setdefault(\"value\", None)\n input_dict.setdefault(\"required\", True)\n\n build_config[name] = input_dict\n\n # Preserve existing value if the parameter name exists in current_values\n if name in current_values:\n build_config[name][\"value\"] = current_values[name]\n\n except (AttributeError, KeyError, TypeError) as e:\n msg = f\"Error processing schema input {schema_input}: {e!s}\"\n logger.exception(msg)\n continue\n except ValueError as e:\n msg = f\"Schema validation error for tool {tool_name}: {e!s}\"\n logger.exception(msg)\n self.schema_inputs = []\n return\n except (AttributeError, KeyError, TypeError) as e:\n msg = f\"Error updating tool config: {e!s}\"\n logger.exception(msg)\n raise ValueError(msg) from e\n\n async def build_output(self) -> DataFrame:\n \"\"\"Build output with improved error handling and validation.\"\"\"\n try:\n self.tools, _ = await self.update_tool_list()\n if self.tool != \"\":\n # Set session context for persistent MCP sessions using Langflow session ID\n session_context = self._get_session_context()\n if session_context:\n self.stdio_client.set_session_context(session_context)\n self.sse_client.set_session_context(session_context)\n\n exec_tool = self._tool_cache[self.tool]\n tool_args = self.get_inputs_for_all_tools(self.tools)[self.tool]\n kwargs = {}\n for arg in tool_args:\n value = getattr(self, arg.name, None)\n if value is not None:\n if isinstance(value, Message):\n kwargs[arg.name] = value.text\n else:\n kwargs[arg.name] = value\n\n unflattened_kwargs = maybe_unflatten_dict(kwargs)\n\n output = await exec_tool.coroutine(**unflattened_kwargs)\n\n tool_content = []\n for item in output.content:\n item_dict = item.model_dump()\n tool_content.append(item_dict)\n return DataFrame(data=tool_content)\n return DataFrame(data=[{\"error\": \"You must select a tool\"}])\n except Exception as e:\n msg = f\"Error in build_output: {e!s}\"\n logger.exception(msg)\n raise ValueError(msg) from e\n\n def _get_session_context(self) -> str | None:\n \"\"\"Get the Langflow session ID for MCP session caching.\"\"\"\n # Try to get session ID from the component's execution context\n if 
hasattr(self, \"graph\") and hasattr(self.graph, \"session_id\"):\n session_id = self.graph.session_id\n # Include server name to ensure different servers get different sessions\n server_name = \"\"\n mcp_server = getattr(self, \"mcp_server\", None)\n if isinstance(mcp_server, dict):\n server_name = mcp_server.get(\"name\", \"\")\n elif mcp_server:\n server_name = str(mcp_server)\n return f\"{session_id}_{server_name}\" if session_id else None\n return None\n\n async def _get_tools(self):\n \"\"\"Get cached tools or update if necessary.\"\"\"\n mcp_server = getattr(self, \"mcp_server\", None)\n if not self._not_load_actions:\n tools, _ = await self.update_tool_list(mcp_server)\n return tools\n return []\n"
  },
  "mcp_server": {
  "_input_type": "McpInput",
@@ -122,7 +122,9 @@ async def update_params_with_load_from_db_fields(
  try:
  key = await custom_component.get_variable(name=params[field], field=field, session=session)
  except ValueError as e:
- if any(reason in str(e) for reason in ["User id is not set", "variable not found."]):
+ if "User id is not set" in str(e):
+ raise
+ if "variable not found." in str(e) and not fallback_to_env_vars:
  raise
  logger.debug(str(e))
  key = None
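The net effect of this hunk: "User id is not set" still always re-raises, while "variable not found." is now fatal only when environment-variable fallback is disabled; otherwise the error is logged and the key falls through to None. A hypothetical predicate (should_reraise is not a Langflow function) restating the new policy:

def should_reraise(error_message: str, *, fallback_to_env_vars: bool) -> bool:
    # Illustrative only; update_params_with_load_from_db_fields inlines these checks.
    if "User id is not set" in error_message:
        return True  # always fatal, as before
    # A missing variable is now fatal only when env-var fallback is disabled.
    return "variable not found." in error_message and not fallback_to_env_vars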
langflow/main.py CHANGED
@@ -54,6 +54,15 @@ _tasks: list[asyncio.Task] = []
  MAX_PORT = 65535


+ async def log_exception_to_telemetry(exc: Exception, context: str) -> None:
+ """Helper to safely log exceptions to telemetry without raising."""
+ try:
+ telemetry_service = get_telemetry_service()
+ await telemetry_service.log_exception(exc, context)
+ except (httpx.HTTPError, asyncio.QueueFull):
+ logger.warning(f"Failed to log {context} exception to telemetry")
+
+
  class RequestCancelledMiddleware(BaseHTTPMiddleware):
  def __init__(self, app) -> None:
  super().__init__(app)
@@ -111,10 +120,9 @@ async def load_bundles_with_error_handling():


  def get_lifespan(*, fix_migration=False, version=None):
- telemetry_service = get_telemetry_service()
-
  @asynccontextmanager
  async def lifespan(_app: FastAPI):
+ telemetry_service = get_telemetry_service()
  configure(async_file=True)

  # Startup message
@@ -208,6 +216,8 @@ def get_lifespan(*, fix_migration=False, version=None):
  except Exception as exc:
  if "langflow migration --fix" not in str(exc):
  logger.exception(exc)
+
+ await log_exception_to_telemetry(exc, "lifespan")
  raise
  finally:
  # Clean shutdown with progress indicator
@@ -256,12 +266,16 @@ def get_lifespan(*, fix_migration=False, version=None):
  except (sqlalchemy.exc.OperationalError, sqlalchemy.exc.DBAPIError) as e:
  # Case where the database connection is closed during shutdown
  logger.warning(f"Database teardown failed due to closed connection: {e}")
+ await log_exception_to_telemetry(e, "lifespan_database_teardown")
  except asyncio.CancelledError:
  # Swallow this - it's normal during shutdown
  logger.debug("Teardown cancelled during shutdown.")
+ raise
  except Exception as e: # noqa: BLE001
  logger.exception(f"Unhandled error during cleanup: {e}")

+ await log_exception_to_telemetry(e, "lifespan_cleanup")
+
  try:
  await asyncio.shield(asyncio.sleep(0.1)) # let logger flush async logs
  await asyncio.shield(logger.complete())
@@ -380,6 +394,9 @@ def create_app():
  content={"message": str(exc.detail)},
  )
  logger.error(f"unhandled error: {exc}", exc_info=exc)
+
+ await log_exception_to_telemetry(exc, "handler")
+
  return JSONResponse(
  status_code=HTTPStatus.INTERNAL_SERVER_ERROR,
  content={"message": str(exc)},
@@ -1,7 +1,7 @@
  from datetime import datetime, timezone
  from uuid import UUID, uuid4

- from sqlmodel import Field, SQLModel
+ from sqlmodel import Field, SQLModel, UniqueConstraint

  from langflow.schema.serialize import UUIDstr

@@ -9,9 +9,11 @@ from langflow.schema.serialize import UUIDstr
  class File(SQLModel, table=True): # type: ignore[call-arg]
  id: UUIDstr = Field(default_factory=uuid4, primary_key=True)
  user_id: UUID = Field(foreign_key="user.id")
- name: str = Field(unique=True, nullable=False)
+ name: str = Field(nullable=False)
  path: str = Field(nullable=False)
  size: int = Field(nullable=False)
  provider: str | None = Field(default=None)
  created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
  updated_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
+
+ __table_args__ = (UniqueConstraint("name", "user_id"),)
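Together with migration 1cb603706752_modify_uniqueness_constraint_on_file_.py at the top of the file list, these two hunks relax file-name uniqueness from global to per-user. For reference, the File model as it reads after the change, reassembled from the hunks with two explanatory comments added:

from datetime import datetime, timezone
from uuid import UUID, uuid4

from sqlmodel import Field, SQLModel, UniqueConstraint

from langflow.schema.serialize import UUIDstr


class File(SQLModel, table=True):  # type: ignore[call-arg]
    id: UUIDstr = Field(default_factory=uuid4, primary_key=True)
    user_id: UUID = Field(foreign_key="user.id")
    name: str = Field(nullable=False)  # no longer globally unique
    path: str = Field(nullable=False)
    size: int = Field(nullable=False)
    provider: str | None = Field(default=None)
    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    updated_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))

    # Uniqueness is now enforced on the (name, user_id) pair rather than on name alone.
    __table_args__ = (UniqueConstraint("name", "user_id"),)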
@@ -156,7 +156,9 @@ class DatabaseService(Service):
  "check_same_thread": False,
  "timeout": settings.db_connect_timeout,
  }
-
+ # For PostgreSQL, set the timezone to UTC
+ if settings.database_url and settings.database_url.startswith(("postgresql", "postgres")):
+ return {"options": "-c timezone=utc"}
  return {}

  def on_connection(self, dbapi_connection, _connection_record) -> None:
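The dict returned here appears to feed the engine's connect_args (the surrounding keys are SQLite connect args), so PostgreSQL sessions get pinned to UTC via libpq's options string. A minimal standalone illustration of the same connect args with plain SQLAlchemy; the DSN is a placeholder, not a Langflow default:

from sqlalchemy import create_engine, text

engine = create_engine(
    "postgresql+psycopg2://user:secret@localhost:5432/langflow",  # placeholder DSN
    connect_args={"options": "-c timezone=utc"},  # same dict the new branch returns
)

with engine.connect() as conn:
    # Sessions from this engine report UTC regardless of the server's default timezone.
    print(conn.execute(text("SHOW TIME ZONE")).scalar())  # -> UTC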
@@ -36,3 +36,10 @@ class ComponentPayload(BaseModel):
  component_seconds: int = Field(serialization_alias="componentSeconds")
  component_success: bool = Field(serialization_alias="componentSuccess")
  component_error_message: str | None = Field(serialization_alias="componentErrorMessage")
+
+
+ class ExceptionPayload(BaseModel):
+ exception_type: str = Field(serialization_alias="exceptionType")
+ exception_message: str = Field(serialization_alias="exceptionMessage")
+ exception_context: str = Field(serialization_alias="exceptionContext") # "lifespan" or "handler"
+ stack_trace_hash: str | None = Field(None, serialization_alias="stackTraceHash") # Hash for grouping
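Since the telemetry client serializes payloads with model_dump(by_alias=True, ...) (see the service hunk further down), the new payload goes out with camelCase keys. A quick illustration with made-up values, assuming pydantic v2 semantics to match the serialization_alias keyword used above:

from langflow.services.telemetry.schema import ExceptionPayload

payload = ExceptionPayload(
    exception_type="ValueError",
    exception_message="something went wrong",
    exception_context="handler",
    stack_trace_hash="a1b2c3d4e5f60718",  # made-up 16-char hash
)
print(payload.model_dump(by_alias=True, exclude_none=True))
# {'exceptionType': 'ValueError', 'exceptionMessage': 'something went wrong',
#  'exceptionContext': 'handler', 'stackTraceHash': 'a1b2c3d4e5f60718'}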
@@ -1,8 +1,10 @@
  from __future__ import annotations

  import asyncio
+ import hashlib
  import os
  import platform
+ import traceback
  from datetime import datetime, timezone
  from typing import TYPE_CHECKING

@@ -13,6 +15,7 @@ from langflow.services.base import Service
  from langflow.services.telemetry.opentelemetry import OpenTelemetry
  from langflow.services.telemetry.schema import (
  ComponentPayload,
+ ExceptionPayload,
  PlaygroundPayload,
  RunPayload,
  ShutdownPayload,
@@ -65,6 +68,7 @@ class TelemetryService(Service):
  url = f"{self.base_url}"
  if path:
  url = f"{url}/{path}"
+
  try:
  payload_dict = payload.model_dump(by_alias=True, exclude_none=True, exclude_unset=True)
  response = await self.client.get(url, params=payload_dict)
@@ -119,6 +123,27 @@ class TelemetryService(Service):
  async def log_package_component(self, payload: ComponentPayload) -> None:
  await self._queue_event((self.send_telemetry_data, payload, "component"))

+ async def log_exception(self, exc: Exception, context: str) -> None:
+ """Log unhandled exceptions to telemetry.
+
+ Args:
+ exc: The exception that occurred
+ context: Context where exception occurred ("lifespan" or "handler")
+ """
+ # Get the stack trace and hash it for grouping similar exceptions
+ stack_trace = traceback.format_exception(type(exc), exc, exc.__traceback__)
+ stack_trace_str = "".join(stack_trace)
+ # Hash stack trace for grouping similar exceptions, truncated to save space
+ stack_trace_hash = hashlib.sha256(stack_trace_str.encode()).hexdigest()[:16]
+
+ payload = ExceptionPayload(
+ exception_type=exc.__class__.__name__,
+ exception_message=str(exc)[:500], # Truncate long messages
+ exception_context=context,
+ stack_trace_hash=stack_trace_hash,
+ )
+ await self._queue_event((self.send_telemetry_data, payload, "exception"))
+
  def start(self) -> None:
  if self.running or self.do_not_track:
  return
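The grouping key in log_exception is a truncated SHA-256 of the formatted traceback, so repeated occurrences of the same failure path collapse into one bucket without shipping the trace itself. A standalone sketch of that computation; stack_trace_hash_of is our name, the method inlines it:

import hashlib
import traceback


def stack_trace_hash_of(exc: Exception) -> str:
    # Same recipe as log_exception above: format the traceback, hash it, keep 16 hex chars.
    stack_trace = traceback.format_exception(type(exc), exc, exc.__traceback__)
    return hashlib.sha256("".join(stack_trace).encode()).hexdigest()[:16]


try:
    1 / 0
except ZeroDivisionError as exc:
    print(stack_trace_hash_of(exc))  # identical failure paths yield identical hashes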
@@ -275,10 +275,19 @@ class TracingService(Service):
  @staticmethod
  def _cleanup_inputs(inputs: dict[str, Any]):
  inputs = inputs.copy()
- for key in inputs:
- if "api_key" in key:
- inputs[key] = "*****" # avoid logging api_keys for security reasons
- return inputs
+ sensitive_keywords = {"api_key", "password", "server_url"}
+
+ def _mask(obj: Any):
+ if isinstance(obj, dict):
+ return {
+ k: "*****" if any(word in k.lower() for word in sensitive_keywords) else _mask(v)
+ for k, v in obj.items()
+ }
+ if isinstance(obj, list):
+ return [_mask(i) for i in obj]
+ return obj
+
+ return _mask(inputs)

  def _start_component_traces(
  self,
@@ -344,6 +353,7 @@ class TracingService(Service):
  if component._vertex:
  trace_id = component._vertex.id
  trace_type = component.trace_type
+ inputs = self._cleanup_inputs(inputs)
  component_trace_context = ComponentTraceContext(
  trace_id, trace_name, trace_type, component._vertex, inputs, metadata
  )
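The rewritten _cleanup_inputs masks any key containing api_key, password, or server_url at any nesting depth (including dicts inside lists), and _start_component_traces now applies it before building the trace context. An illustration on a hypothetical nested input, assuming the enclosing class is TracingService in langflow.services.tracing.service (consistent with the file path in this diff):

from langflow.services.tracing.service import TracingService

inputs = {
    "api_key": "sk-123",
    "config": {"server_url": "https://internal.example", "retries": 3},
    "credentials": [{"db_password": "hunter2"}],
}
print(TracingService._cleanup_inputs(inputs))
# {'api_key': '*****', 'config': {'server_url': '*****', 'retries': 3},
#  'credentials': [{'db_password': '*****'}]}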
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: langflow-base-nightly
- Version: 0.5.0.dev33
+ Version: 0.5.0.dev35
  Summary: A Python package with a built-in web application
  Project-URL: Repository, https://github.com/langflow-ai/langflow
  Project-URL: Documentation, https://docs.langflow.org