langflow-base-nightly 0.5.0.dev33__py3-none-any.whl → 0.5.0.dev34__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their public registries.
Files changed (163)
  1. langflow/api/v1/endpoints.py +1 -1
  2. langflow/base/composio/composio_base.py +1092 -126
  3. langflow/components/composio/__init__.py +24 -0
  4. langflow/components/composio/composio_api.py +116 -136
  5. langflow/components/composio/dropbox_compnent.py +11 -0
  6. langflow/components/composio/github_composio.py +1 -639
  7. langflow/components/composio/gmail_composio.py +26 -394
  8. langflow/components/composio/googlecalendar_composio.py +2 -778
  9. langflow/components/composio/googlemeet_composio.py +11 -0
  10. langflow/components/composio/googletasks_composio.py +8 -0
  11. langflow/components/composio/linear_composio.py +11 -0
  12. langflow/components/composio/outlook_composio.py +1 -755
  13. langflow/components/composio/reddit_composio.py +11 -0
  14. langflow/components/composio/slack_composio.py +1 -576
  15. langflow/components/composio/slackbot_composio.py +11 -0
  16. langflow/components/composio/supabase_composio.py +11 -0
  17. langflow/components/composio/todoist_composio.py +11 -0
  18. langflow/components/composio/youtube_composio.py +11 -0
  19. langflow/custom/utils.py +30 -7
  20. langflow/frontend/assets/{SlackIcon-Bikuxo8x.js → SlackIcon-B260Qg_R.js} +1 -1
  21. langflow/frontend/assets/{Wikipedia-B6aCFf5-.js → Wikipedia-BB2mbgyd.js} +1 -1
  22. langflow/frontend/assets/{Wolfram-CekL_M-a.js → Wolfram-DytXC9hF.js} +1 -1
  23. langflow/frontend/assets/{index-D1RgjMON.js → index-3TJWUdmx.js} +1 -1
  24. langflow/frontend/assets/{index-B4xLpgbM.js → index-3qMh9x6K.js} +1 -1
  25. langflow/frontend/assets/{index-DEuXrfXH.js → index-3uOAA_XX.js} +1 -1
  26. langflow/frontend/assets/{index-DTJX3yQa.js → index-4eRtaV45.js} +1 -1
  27. langflow/frontend/assets/index-7xXgqu09.js +1 -0
  28. langflow/frontend/assets/{index-BRNhftot.js → index-AY5Dm2mG.js} +1 -1
  29. langflow/frontend/assets/{index-4Tl3Nxdo.js → index-AlJ7td-D.js} +1 -1
  30. langflow/frontend/assets/{index-D2nHdRne.js → index-B-c82Fnu.js} +1 -1
  31. langflow/frontend/assets/{index-C3RZz8WE.js → index-B2ggrBuR.js} +1 -1
  32. langflow/frontend/assets/{index-in188l0A.js → index-B536IPXH.js} +1 -1
  33. langflow/frontend/assets/{index-CP0tFKwN.js → index-B5ed-sAv.js} +1 -1
  34. langflow/frontend/assets/{index-CAzSTGAM.js → index-B8TlNgn-.js} +1 -1
  35. langflow/frontend/assets/{index-09CVJwsY.js → index-B8y58M9b.js} +1 -1
  36. langflow/frontend/assets/{index-B9uOBe6Y.js → index-B9Mo3ndZ.js} +1 -1
  37. langflow/frontend/assets/{index-DAJafn16.js → index-BCK-ZyIh.js} +1 -1
  38. langflow/frontend/assets/{index-Cy-ZEfWh.js → index-BEDxAk3N.js} +1 -1
  39. langflow/frontend/assets/{index-DbmqjLy6.js → index-BEKoRwsX.js} +1 -1
  40. langflow/frontend/assets/{index-BcqeL_f4.js → index-BIkqesA-.js} +1 -1
  41. langflow/frontend/assets/{index-7x3wNZ-4.js → index-BJrY2Fiu.js} +1 -1
  42. langflow/frontend/assets/{index-Iamzh9ZT.js → index-BKvKC-12.js} +1 -1
  43. langflow/frontend/assets/{index-COqjpsdy.js → index-BLROcaSz.js} +1 -1
  44. langflow/frontend/assets/{index-BRwkzs92.js → index-BNbWMmAV.js} +1 -1
  45. langflow/frontend/assets/{index-C_UkF-RJ.js → index-BOEf7-ty.js} +1 -1
  46. langflow/frontend/assets/index-BOYTBrh9.js +1 -0
  47. langflow/frontend/assets/{index-DDcpxWU4.js → index-BQB-iDYl.js} +1 -1
  48. langflow/frontend/assets/{index-Crq_yhkG.js → index-BRWNIt9F.js} +1 -1
  49. langflow/frontend/assets/{index-DmaQAn3K.js → index-BVHvIhT5.js} +1 -1
  50. langflow/frontend/assets/{index-Cs_jt3dj.js → index-BVtf6m9S.js} +1 -1
  51. langflow/frontend/assets/{index-T2jJOG85.js → index-BWq9GTzt.js} +1 -1
  52. langflow/frontend/assets/{index-Dz0r9Idb.js → index-BXMhmvTj.js} +1 -1
  53. langflow/frontend/assets/{index-eJwu5YEi.js → index-Ba3RTMXI.js} +1 -1
  54. langflow/frontend/assets/{index-xVx59Op-.js → index-Baka5dKE.js} +1 -1
  55. langflow/frontend/assets/{index-DnusMCK1.js → index-BbsND1Qg.js} +1 -1
  56. langflow/frontend/assets/index-BcgB3rXH.js +1 -0
  57. langflow/frontend/assets/{index-CmiRgF_-.js → index-BdIWbCEL.js} +1 -1
  58. langflow/frontend/assets/{index-BllNr21U.js → index-BdYgKk1d.js} +1 -1
  59. langflow/frontend/assets/{index-BIKbxmIh.js → index-BeNby7qF.js} +1 -1
  60. langflow/frontend/assets/{index-CUe1ivTn.js → index-BejHxU5W.js} +1 -1
  61. langflow/frontend/assets/{index-CVphnxXi.js → index-Bisa4IQF.js} +1 -1
  62. langflow/frontend/assets/{index-Cr2oy5K2.js → index-BjENqyKe.js} +1 -1
  63. langflow/frontend/assets/{index-CEn_71Wk.js → index-BlBl2tvQ.js} +1 -1
  64. langflow/frontend/assets/{index-DOb9c2bf.js → index-BnLT29qW.js} +1 -1
  65. langflow/frontend/assets/{index-BRizlHaN.js → index-BqUeOc7Y.js} +1 -1
  66. langflow/frontend/assets/{index-D7nFs6oq.js → index-BsBWP-Dh.js} +1 -1
  67. langflow/frontend/assets/{index-BlRTHXW5.js → index-BtJ2o21k.js} +1 -1
  68. langflow/frontend/assets/{index-AOX7bbjJ.js → index-BxWXWRmZ.js} +1 -1
  69. langflow/frontend/assets/{index-B20KmxhS.js → index-BxkZkBgQ.js} +1 -1
  70. langflow/frontend/assets/{index-DoFlaGDx.js → index-Bxml6wXu.js} +1 -1
  71. langflow/frontend/assets/{index-B9KRIJFi.js → index-ByFXr9Iq.js} +1 -1
  72. langflow/frontend/assets/{index-CY6LUi4V.js → index-C2Xd7UkR.js} +1 -1
  73. langflow/frontend/assets/index-C76aBV_h.js +1 -0
  74. langflow/frontend/assets/{index-9gkURvG2.js → index-C7V5U9yH.js} +1 -1
  75. langflow/frontend/assets/{index-BDmbsLY2.js → index-C7x9R_Yo.js} +1 -1
  76. langflow/frontend/assets/{index-DI0zAExi.js → index-C8KD3LPb.js} +1 -1
  77. langflow/frontend/assets/{index-DzDNhMMW.js → index-C9N80hP8.js} +1 -1
  78. langflow/frontend/assets/{index-6GWpsedd.js → index-CDFLVFB4.js} +1 -1
  79. langflow/frontend/assets/{index-pkOi9P45.js → index-CF4dtI6S.js} +1 -1
  80. langflow/frontend/assets/{index-CdwjD4IX.js → index-CG7cp0nD.js} +1 -1
  81. langflow/frontend/assets/{index-J0pvFqLk.js → index-CHFO5O4g.js} +1 -1
  82. langflow/frontend/assets/{index-5G402gB8.js → index-CJwYfDBz.js} +1 -1
  83. langflow/frontend/assets/{index-BzCjyHto.js → index-CMGZGIx_.js} +1 -1
  84. langflow/frontend/assets/{index-Bm7a2vMS.js → index-COL0eiWI.js} +1 -1
  85. langflow/frontend/assets/{index-JHCxbvlW.js → index-CWWo2zOA.js} +1 -1
  86. langflow/frontend/assets/{index-C7wDSVVH.js → index-C_1RBTul.js} +1 -1
  87. langflow/frontend/assets/{index-BIjUtp6d.js → index-Ccb5B8zG.js} +1 -1
  88. langflow/frontend/assets/{index-yIh6-LZT.js → index-Cd5zuUUK.js} +1 -1
  89. langflow/frontend/assets/{index-CPIdMJkX.js → index-CkQ-bJ4G.js} +1 -1
  90. langflow/frontend/assets/{index-TRyDa01A.js → index-CkSzjCqM.js} +1 -1
  91. langflow/frontend/assets/{index-CSRizl2S.js → index-CoUlHbtg.js} +1 -1
  92. langflow/frontend/assets/index-Cpgkb0Q3.js +1 -0
  93. langflow/frontend/assets/{index-Cp7Pmn03.js → index-CqDUqHfd.js} +1 -1
  94. langflow/frontend/assets/{index-CGVDXKtN.js → index-Ct9_T9ox.js} +1 -1
  95. langflow/frontend/assets/{index-BwlYjc56.js → index-CvQ0w8Pj.js} +1 -1
  96. langflow/frontend/assets/{index-DkJCCraf.js → index-CwIxqYlT.js} +1 -1
  97. langflow/frontend/assets/{index-Bgd7yLoW.js → index-Cx__T92e.js} +1 -1
  98. langflow/frontend/assets/{index-RveG4dl9.js → index-D-zkHcob.js} +1 -1
  99. langflow/frontend/assets/{index-DVV_etfW.js → index-D0HmkH0H.js} +1 -1
  100. langflow/frontend/assets/{index-CglSqvB5.js → index-D0s9f6Re.js} +1 -1
  101. langflow/frontend/assets/{index-J98sU-1p.js → index-D5PeCofu.js} +1 -1
  102. langflow/frontend/assets/{index-BJIsQS8D.js → index-D87Zw62M.js} +1 -1
  103. langflow/frontend/assets/{index-FYcoJPMP.js → index-D9eflZfP.js} +1 -1
  104. langflow/frontend/assets/{index-DJs6FoYC.js → index-DDNNv4C0.js} +1 -1
  105. langflow/frontend/assets/index-DHlEwAxb.js +1 -0
  106. langflow/frontend/assets/{index-DqDQk0Cu.js → index-DIqSyDVO.js} +1 -1
  107. langflow/frontend/assets/{index-DOI0ceS-.js → index-DK8vNpXK.js} +1 -1
  108. langflow/frontend/assets/{index-D29n5mus.js → index-DKEXZFUO.js} +1 -1
  109. langflow/frontend/assets/{index-dfaj9-hY.js → index-DPX6X_bw.js} +1 -1
  110. langflow/frontend/assets/{index-CgbINWS8.js → index-DS1EgA10.js} +1 -1
  111. langflow/frontend/assets/{index-C69gdJqw.js → index-DS9I4y48.js} +1 -1
  112. langflow/frontend/assets/{index-B2EmwqKj.js → index-DWkMJnbd.js} +1 -1
  113. langflow/frontend/assets/{index-CIYzjH2y.js → index-DWr_zPkx.js} +1 -1
  114. langflow/frontend/assets/{index-D-HTZ68O.js → index-DX7XsAcx.js} +1 -1
  115. langflow/frontend/assets/{index-Cq30cQcP.js → index-DZzbmg3J.js} +1 -1
  116. langflow/frontend/assets/{index-BZCt_UnJ.js → index-DasrI03Y.js} +1 -1
  117. langflow/frontend/assets/index-DdzVmJHE.js +1 -0
  118. langflow/frontend/assets/{index-DmvjdU1N.js → index-DhzEUXfr.js} +1 -1
  119. langflow/frontend/assets/{index-B_ytx_iA.js → index-DpJiH-Rk.js} +1 -1
  120. langflow/frontend/assets/{index-Cyk3aCmP.js → index-DpQKtcXu.js} +1 -1
  121. langflow/frontend/assets/{index-DrvRK4_i.js → index-Dpz3oBf5.js} +1 -1
  122. langflow/frontend/assets/{index-DF0oWRdd.js → index-DqSH4x-R.js} +1 -1
  123. langflow/frontend/assets/{index-DX_InNVT.js → index-DtJyCbzF.js} +1 -1
  124. langflow/frontend/assets/{index-B4AtFbkN.js → index-Du9aJK7m.js} +1 -1
  125. langflow/frontend/assets/{index-qXcoVIRo.js → index-DuAeoC-H.js} +1 -1
  126. langflow/frontend/assets/{index-D7Vx6mgS.js → index-DxIs8VSp.js} +1 -1
  127. langflow/frontend/assets/{index-U7J1YiWE.js → index-DyJDHm2D.js} +1 -1
  128. langflow/frontend/assets/{index-1MEYR1La.js → index-DzeIsaBm.js} +1 -1
  129. langflow/frontend/assets/{index-Cbwk3f-p.js → index-DztLFiip.js} +1 -1
  130. langflow/frontend/assets/{index-C_2G2ZqJ.js → index-GODbXlHC.js} +1 -1
  131. langflow/frontend/assets/{index-2vQdFIK_.js → index-G_U_kPAd.js} +1 -1
  132. langflow/frontend/assets/{index-DS4F_Phe.js → index-IFGgPiye.js} +1 -1
  133. langflow/frontend/assets/{index-5hW8VleF.js → index-LrMzDsq9.js} +1 -1
  134. langflow/frontend/assets/{index-L7FKc9QN.js → index-R7q8cAek.js} +1 -1
  135. langflow/frontend/assets/{index-BRE8A4Q_.js → index-Uq2ij_SS.js} +1 -1
  136. langflow/frontend/assets/{index-Bn4HAVDG.js → index-VHmUHUUU.js} +1 -1
  137. langflow/frontend/assets/{index-VO-pk-Hg.js → index-VZnN0P6C.js} +1 -1
  138. langflow/frontend/assets/{index-Dy7ehgeV.js → index-VcXZzovW.js} +1 -1
  139. langflow/frontend/assets/{index-DNS4La1f.js → index-Ym6gz0T6.js} +1 -1
  140. langflow/frontend/assets/{index-UI2ws3qp.js → index-ci4XHjbJ.js} +176 -176
  141. langflow/frontend/assets/{index-DlMAYATX.js → index-dkS0ek2S.js} +1 -1
  142. langflow/frontend/assets/{index-Dc0p1Oxl.js → index-hOkEW3JP.js} +1 -1
  143. langflow/frontend/assets/{index-KnS52ylc.js → index-js8ceOaP.js} +1 -1
  144. langflow/frontend/assets/{index-DtCsjX48.js → index-lKEJpUsF.js} +1 -1
  145. langflow/frontend/assets/{index-BO4fl1uU.js → index-mBjJYD9q.js} +1 -1
  146. langflow/frontend/assets/{index-C_K6Tof7.js → index-r1LZg-PY.js} +1 -1
  147. langflow/frontend/assets/index-rcdQpNcU.js +1 -0
  148. langflow/frontend/assets/{index-_3qag0I4.js → index-sS6XLk3j.js} +1 -1
  149. langflow/frontend/assets/{index-C6P0vvSP.js → index-tOy_uloT.js} +1 -1
  150. langflow/frontend/assets/lazyIconImports-Bh1TFfvH.js +2 -0
  151. langflow/frontend/assets/{use-post-add-user-Bt6vZvvT.js → use-post-add-user-HN0rRnhv.js} +1 -1
  152. langflow/frontend/index.html +1 -1
  153. langflow/interface/initialize/loading.py +3 -1
  154. langflow/main.py +19 -2
  155. langflow/services/database/service.py +3 -1
  156. langflow/services/telemetry/schema.py +7 -0
  157. langflow/services/telemetry/service.py +25 -0
  158. langflow/services/tracing/service.py +14 -4
  159. {langflow_base_nightly-0.5.0.dev33.dist-info → langflow_base_nightly-0.5.0.dev34.dist-info}/METADATA +1 -1
  160. {langflow_base_nightly-0.5.0.dev33.dist-info → langflow_base_nightly-0.5.0.dev34.dist-info}/RECORD +162 -145
  161. langflow/frontend/assets/lazyIconImports-kvf_Kak2.js +0 -2
  162. {langflow_base_nightly-0.5.0.dev33.dist-info → langflow_base_nightly-0.5.0.dev34.dist-info}/WHEEL +0 -0
  163. {langflow_base_nightly-0.5.0.dev33.dist-info → langflow_base_nightly-0.5.0.dev34.dist-info}/entry_points.txt +0 -0
@@ -1,31 +1,69 @@
+ import copy
  import re
- from abc import abstractmethod
  from typing import Any

- from composio.client.collections import AppAuthScheme
- from composio.client.exceptions import NoItemsFound
- from composio.exceptions import ApiKeyError
- from composio_langchain import ComposioToolSet
+ from composio import Composio
+ from composio_langchain import LangchainProvider
  from langchain_core.tools import Tool

+ from langflow.base.mcp.util import create_input_schema_from_json_schema
  from langflow.custom.custom_component.component import Component
  from langflow.inputs.inputs import (
  AuthInput,
+ FileInput,
+ InputTypes,
  MessageTextInput,
  SecretStrInput,
  SortableListInput,
  )
  from langflow.io import Output
+ from langflow.io.schema import flatten_schema, schema_to_langflow_inputs
  from langflow.logging import logger
  from langflow.schema.data import Data
  from langflow.schema.dataframe import DataFrame
  from langflow.schema.message import Message


+ def _patch_graph_clean_null_input_types() -> None:
+ """Monkey-patch Graph._create_vertex to clean legacy templates."""
+ try:
+ from langflow.graph.graph.base import Graph
+
+ original_create_vertex = Graph._create_vertex
+
+ def _create_vertex_with_cleanup(self, frontend_data):
+ try:
+ node_id: str | None = frontend_data.get("id") if isinstance(frontend_data, dict) else None
+ if node_id and "Composio" in node_id:
+ template = frontend_data.get("data", {}).get("node", {}).get("template", {})
+ if isinstance(template, dict):
+ for field_cfg in template.values():
+ if isinstance(field_cfg, dict) and field_cfg.get("input_types") is None:
+ field_cfg["input_types"] = []
+ except (AttributeError, TypeError, KeyError) as e:
+ logger.debug(f"Composio template cleanup encountered error: {e}")
+
+ return original_create_vertex(self, frontend_data)
+
+ # Patch only once
+ if getattr(Graph, "_composio_patch_applied", False) is False:
+ Graph._create_vertex = _create_vertex_with_cleanup # type: ignore[method-assign]
+ Graph._composio_patch_applied = True # type: ignore[attr-defined]
+ logger.debug("Applied Composio template cleanup patch to Graph._create_vertex")
+
+ except (AttributeError, TypeError) as e:
+ logger.debug(f"Failed to apply Composio Graph patch: {e}")
+
+
+ # Apply the patch at import time
+ _patch_graph_clean_null_input_types()
+
+
  class ComposioBaseComponent(Component):
  """Base class for Composio components with common functionality."""

- # Common inputs that all Composio components will need
+ default_tools_limit: int = 5
+
  _base_inputs = [
  MessageTextInput(
  name="entity_id",
@@ -45,9 +83,10 @@ class ComposioBaseComponent(Component):
  name="auth_link",
  value="",
  auth_tooltip="Please insert a valid Composio API Key.",
+ show=False,
  ),
  SortableListInput(
- name="action",
+ name="action_button",
  display_name="Action",
  placeholder="Select action",
  options=[],
@@ -60,27 +99,40 @@ class ComposioBaseComponent(Component):
  limit=1,
  ),
  ]
- _all_fields: set[str] = set()
- _bool_variables: set[str] = set()
- _actions_data: dict[str, dict[str, Any]] = {}
- _default_tools: set[str] = set()
- _display_to_key_map: dict[str, str] = {}
- _key_to_display_map: dict[str, str] = {}
- _sanitized_names: dict[str, str] = {}
+
  _name_sanitizer = re.compile(r"[^a-zA-Z0-9_-]")

+ # Class-level caches
+ _actions_cache: dict[str, dict[str, Any]] = {}
+ _action_schema_cache: dict[str, dict[str, Any]] = {}
+
  outputs = [
  Output(name="dataFrame", display_name="DataFrame", method="as_dataframe"),
  ]

+ inputs = list(_base_inputs)
+
+ def __init__(self, **kwargs):
+ """Initialize instance variables to prevent shared state between components."""
+ super().__init__(**kwargs)
+ self._all_fields: set[str] = set()
+ self._bool_variables: set[str] = set()
+ self._actions_data: dict[str, dict[str, Any]] = {}
+ self._default_tools: set[str] = set()
+ self._display_to_key_map: dict[str, str] = {}
+ self._key_to_display_map: dict[str, str] = {}
+ self._sanitized_names: dict[str, str] = {}
+ self._action_schemas: dict[str, Any] = {}
+
  def as_message(self) -> Message:
  result = self.execute_action()
+ if result is None:
+ return Message(text="Action execution returned no result")
  return Message(text=str(result))

  def as_dataframe(self) -> DataFrame:
  result = self.execute_action()
- # If the result is a dict, pandas will raise ValueError: If using all scalar values, you must pass an index
- # So we need to make sure the result is a list of dicts
+
  if isinstance(result, dict):
  result = [result]
  return DataFrame(result)
@@ -115,13 +167,13 @@ class ComposioBaseComponent(Component):
  return set()
  return set(self._actions_data[action_key]["action_fields"]) if action_key in self._actions_data else set()

- def _build_wrapper(self) -> ComposioToolSet:
- """Build the Composio toolset wrapper."""
+ def _build_wrapper(self) -> Composio:
+ """Build the Composio wrapper."""
  try:
  if not self.api_key:
  msg = "Composio API Key is required"
  raise ValueError(msg)
- return ComposioToolSet(api_key=self.api_key)
+ return Composio(api_key=self.api_key, provider=LangchainProvider())

  except ValueError as e:
  logger.error(f"Error building Composio wrapper: {e}")
@@ -157,128 +209,940 @@ class ComposioBaseComponent(Component):
  else:
  build_config[field]["value"] = ""

+ def _populate_actions_data(self):
+ """Fetch the list of actions for the toolkit and build helper maps."""
+ if self._actions_data:
+ return
+
+ # Try to load from the class-level cache
+ toolkit_slug = self.app_name.lower()
+ if toolkit_slug in self.__class__._actions_cache:
+ # Deep-copy so that any mutation on this instance does not affect the
+ # cached master copy.
+ self._actions_data = copy.deepcopy(self.__class__._actions_cache[toolkit_slug])
+ self._action_schemas = copy.deepcopy(self.__class__._action_schema_cache.get(toolkit_slug, {}))
+ logger.debug(f"Loaded actions for {toolkit_slug} from in-process cache")
+ return
+
+ api_key = getattr(self, "api_key", None)
+ if not api_key:
+ logger.warning("API key is missing. Cannot populate actions data.")
+ return
+
+ try:
+ composio = self._build_wrapper()
+ toolkit_slug = self.app_name.lower()
+
+ raw_tools = composio.tools.get_raw_composio_tools(toolkits=[toolkit_slug], limit=999)
+
+ if not raw_tools:
+ msg = f"Toolkit '{toolkit_slug}' not found or has no available tools"
+ raise ValueError(msg)
+
+ for raw_tool in raw_tools:
+ try:
+ # Convert raw_tool to dict-like structure
+ tool_dict = raw_tool.__dict__ if hasattr(raw_tool, "__dict__") else raw_tool
+
+ if not tool_dict:
+ logger.warning(f"Tool is None or empty: {raw_tool}")
+ continue
+
+ action_key = tool_dict.get("slug")
+ if not action_key:
+ logger.warning(f"Action key (slug) is missing in tool: {tool_dict}")
+ continue
+
+ # Human-friendly display name
+ display_name = tool_dict.get("name") or tool_dict.get("display_name")
+ if not display_name:
+ # Better fallback: convert GMAIL_SEND_EMAIL to "Send Email"
+ # Remove app prefix and convert to title case
+ clean_name = action_key
+ clean_name = clean_name.removeprefix(f"{self.app_name.upper()}_")
+ # Convert underscores to spaces and title case
+ display_name = clean_name.replace("_", " ").title()
+
+ # Build list of parameter names and track bool fields
+ parameters_schema = tool_dict.get("input_parameters", {})
+ if parameters_schema is None:
+ logger.warning(f"Parameters schema is None for action key: {action_key}")
+ # Still add the action but with empty fields
+ self._action_schemas[action_key] = tool_dict
+ self._actions_data[action_key] = {
+ "display_name": display_name,
+ "action_fields": [],
+ "file_upload_fields": set(),
+ }
+ continue
+
+ try:
+ # Special handling for unusual schema structures
+ if not isinstance(parameters_schema, dict):
+ # Try to convert if it's a model object
+ if hasattr(parameters_schema, "model_dump"):
+ parameters_schema = parameters_schema.model_dump()
+ elif hasattr(parameters_schema, "__dict__"):
+ parameters_schema = parameters_schema.__dict__
+ else:
+ logger.warning(f"Cannot process parameters schema for {action_key}, skipping")
+ self._action_schemas[action_key] = tool_dict
+ self._actions_data[action_key] = {
+ "display_name": display_name,
+ "action_fields": [],
+ "file_upload_fields": set(),
+ }
+ continue
+
+ # Validate parameters_schema has required structure before flattening
+ if not parameters_schema.get("properties") and not parameters_schema.get("$defs"):
+ # Create a minimal valid schema to avoid errors
+ parameters_schema = {"type": "object", "properties": {}}
+
+ # Sanitize the schema before passing to flatten_schema
+ # Handle case where 'required' is explicitly None (causes "'NoneType' object is not iterable")
+ if parameters_schema.get("required") is None:
+ parameters_schema = parameters_schema.copy() # Don't modify the original
+ parameters_schema["required"] = []
+
+ try:
+ # Preserve original descriptions before flattening to restore if lost
+ original_descriptions = {}
+ original_props = parameters_schema.get("properties", {})
+ for prop_name, prop_schema in original_props.items():
+ if isinstance(prop_schema, dict) and "description" in prop_schema:
+ original_descriptions[prop_name] = prop_schema["description"]
+
+ flat_schema = flatten_schema(parameters_schema)
+
+ # Restore lost descriptions in flattened schema
+ if flat_schema and isinstance(flat_schema, dict) and "properties" in flat_schema:
+ flat_props = flat_schema["properties"]
+ for field_name, field_schema in flat_props.items():
+ # Check if this field lost its description during flattening
+ if isinstance(field_schema, dict) and "description" not in field_schema:
+ # Try to find the original description
+ # Handle array fields like bcc[0] -> bcc
+ base_field_name = field_name.replace("[0]", "")
+ if base_field_name in original_descriptions:
+ field_schema["description"] = original_descriptions[base_field_name]
+ elif field_name in original_descriptions:
+ field_schema["description"] = original_descriptions[field_name]
+ except (KeyError, TypeError, ValueError):
+ self._action_schemas[action_key] = tool_dict
+ self._actions_data[action_key] = {
+ "display_name": display_name,
+ "action_fields": [],
+ "file_upload_fields": set(),
+ }
+ continue
+
+ if flat_schema is None:
+ logger.warning(f"Flat schema is None for action key: {action_key}")
+ # Still add the action but with empty fields so the UI doesn't break
+ self._action_schemas[action_key] = tool_dict
+ self._actions_data[action_key] = {
+ "display_name": display_name,
+ "action_fields": [],
+ "file_upload_fields": set(),
+ }
+ continue
+
+ # Extract field names and detect file upload fields during parsing
+ raw_action_fields = list(flat_schema.get("properties", {}).keys())
+ action_fields = []
+ attachment_related_found = False
+ file_upload_fields = set()
+
+ # Check original schema properties for file_uploadable fields
+ original_props = parameters_schema.get("properties", {})
+ for field_name, field_schema in original_props.items():
+ if isinstance(field_schema, dict):
+ clean_field_name = field_name.replace("[0]", "")
+ # Check direct file_uploadable attribute
+ if field_schema.get("file_uploadable") is True:
+ file_upload_fields.add(clean_field_name)
+
+ # Check anyOf structures (like OUTLOOK_OUTLOOK_SEND_EMAIL)
+ if "anyOf" in field_schema:
+ for any_of_item in field_schema["anyOf"]:
+ if isinstance(any_of_item, dict) and any_of_item.get("file_uploadable") is True:
+ file_upload_fields.add(clean_field_name)
+
+ for field in raw_action_fields:
+ clean_field = field.replace("[0]", "")
+ # Check if this field is attachment-related
+ if clean_field.lower().startswith("attachment."):
+ attachment_related_found = True
+ continue # Skip individual attachment fields
+
+ # Handle conflicting field names - rename user_id to avoid conflicts with entity_id
+ if clean_field == "user_id":
+ clean_field = f"{self.app_name}_user_id"
+
+ action_fields.append(clean_field)
+
+ # Add consolidated attachment field if we found attachment-related fields
+ if attachment_related_found:
+ action_fields.append("attachment")
+ file_upload_fields.add("attachment") # Attachment fields are also file upload fields
+
+ # Track boolean parameters so we can coerce them later
+ properties = flat_schema.get("properties", {})
+ if properties:
+ for p_name, p_schema in properties.items():
+ if isinstance(p_schema, dict) and p_schema.get("type") == "boolean":
+ # Use cleaned field name for boolean tracking
+ clean_field_name = p_name.replace("[0]", "")
+ self._bool_variables.add(clean_field_name)
+
+ self._action_schemas[action_key] = tool_dict
+ self._actions_data[action_key] = {
+ "display_name": display_name,
+ "action_fields": action_fields,
+ "file_upload_fields": file_upload_fields,
+ }
+
+ except (KeyError, TypeError, ValueError) as flatten_error:
+ logger.error(f"flatten_schema failed for {action_key}: {flatten_error}")
+ self._action_schemas[action_key] = tool_dict
+ self._actions_data[action_key] = {
+ "display_name": display_name,
+ "action_fields": [],
+ "file_upload_fields": set(),
+ }
+ continue
+
+ except ValueError as e:
+ logger.warning(f"Failed processing Composio tool for action {raw_tool}: {e}")
+
+ # Helper look-ups used elsewhere
+ self._all_fields = {f for d in self._actions_data.values() for f in d["action_fields"]}
+ self._build_action_maps()
+
+ # Cache actions for this toolkit so subsequent component instances
+ # can reuse them without hitting the Composio API again.
+ self.__class__._actions_cache[toolkit_slug] = copy.deepcopy(self._actions_data)
+ self.__class__._action_schema_cache[toolkit_slug] = copy.deepcopy(self._action_schemas)
+
+ except ValueError as e:
+ logger.debug(f"Could not populate Composio actions for {self.app_name}: {e}")
+
+ def _validate_schema_inputs(self, action_key: str) -> list[InputTypes]:
+ """Convert the JSON schema for *action_key* into Langflow input objects."""
+ # Skip validation for default/placeholder values
+ if action_key in ("disabled", "placeholder", ""):
+ logger.debug(f"Skipping schema validation for placeholder value: {action_key}")
+ return []
+
+ schema_dict = self._action_schemas.get(action_key)
+ if not schema_dict:
+ logger.warning(f"No schema found for action key: {action_key}")
+ return []
+
+ try:
+ parameters_schema = schema_dict.get("input_parameters", {})
+ if parameters_schema is None:
+ logger.warning(f"Parameters schema is None for action key: {action_key}")
+ return []
+
+ # Check if parameters_schema has the expected structure
+ if not isinstance(parameters_schema, dict):
+ logger.warning(
+ f"Parameters schema is not a dict for action key: {action_key}, got: {type(parameters_schema)}"
+ )
+ return []
+
+ # Validate parameters_schema has required structure before flattening
+ if not parameters_schema.get("properties") and not parameters_schema.get("$defs"):
+ # Create a minimal valid schema to avoid errors
+ parameters_schema = {"type": "object", "properties": {}}
+
+ # Sanitize the schema before passing to flatten_schema
+ # Handle case where 'required' is explicitly None (causes "'NoneType' object is not iterable")
+ if parameters_schema.get("required") is None:
+ parameters_schema = parameters_schema.copy() # Don't modify the original
+ parameters_schema["required"] = []
+
+ try:
+ # Preserve original descriptions before flattening to restore if lost
+ original_descriptions = {}
+ original_props = parameters_schema.get("properties", {})
+ for prop_name, prop_schema in original_props.items():
+ if isinstance(prop_schema, dict) and "description" in prop_schema:
+ original_descriptions[prop_name] = prop_schema["description"]
+
+ flat_schema = flatten_schema(parameters_schema)
+
+ # Restore lost descriptions in flattened schema
+ if flat_schema and isinstance(flat_schema, dict) and "properties" in flat_schema:
+ flat_props = flat_schema["properties"]
+ for field_name, field_schema in flat_props.items():
+ # Check if this field lost its description during flattening
+ if isinstance(field_schema, dict) and "description" not in field_schema:
+ # Try to find the original description
+ # Handle array fields like bcc[0] -> bcc
+ base_field_name = field_name.replace("[0]", "")
+ if base_field_name in original_descriptions:
+ field_schema["description"] = original_descriptions[base_field_name]
+ elif field_name in original_descriptions:
+ field_schema["description"] = original_descriptions[field_name]
+ except (KeyError, TypeError, ValueError) as flatten_error:
+ logger.error(f"flatten_schema failed for {action_key}: {flatten_error}")
+ return []
+
+ if flat_schema is None:
+ logger.warning(f"Flat schema is None for action key: {action_key}")
+ return []
+
+ # Additional check for flat_schema structure
+ if not isinstance(flat_schema, dict):
+ logger.warning(f"Flat schema is not a dict for action key: {action_key}, got: {type(flat_schema)}")
+ return []
+
+ # Ensure flat_schema has the expected structure for create_input_schema_from_json_schema
+ if flat_schema.get("type") != "object":
+ logger.warning(f"Flat schema for {action_key} is not of type 'object', got: {flat_schema.get('type')}")
+ # Fix the schema type if it's missing
+ flat_schema["type"] = "object"
+
+ if "properties" not in flat_schema:
+ flat_schema["properties"] = {}
+
+ # Clean up field names - remove [0] suffixes from array fields
+ cleaned_properties = {}
+ attachment_related_fields = set() # Track fields that are attachment-related
+
+ for field_name, field_schema in flat_schema.get("properties", {}).items():
+ # Remove [0] suffix from field names (e.g., "bcc[0]" -> "bcc", "cc[0]" -> "cc")
+ clean_field_name = field_name.replace("[0]", "")
+
+ # Check if this field is attachment-related (contains "attachment." prefix)
+ if clean_field_name.lower().startswith("attachment."):
+ attachment_related_fields.add(clean_field_name)
+ # Don't add individual attachment sub-fields to the schema
+ continue
+
+ # Handle conflicting field names - rename user_id to avoid conflicts with entity_id
+ if clean_field_name == "user_id":
+ clean_field_name = f"{self.app_name}_user_id"
+ # Update the field schema description to reflect the name change
+ field_schema_copy = field_schema.copy()
+ field_schema_copy["description"] = (
+ f"User ID for {self.app_name.title()}: " + field_schema["description"]
+ )
+ else:
+ # Use the original field schema for all other fields
+ field_schema_copy = field_schema
+
+ # Preserve the full schema information, not just the type
+ cleaned_properties[clean_field_name] = field_schema_copy
+
+ # If we found attachment-related fields, add a single "attachment" field
+ if attachment_related_fields:
+ # Create a generic attachment field schema
+ attachment_schema = {
+ "type": "string",
+ "description": "File attachment for the email",
+ "title": "Attachment",
+ }
+ cleaned_properties["attachment"] = attachment_schema
+
+ # Update the flat schema with cleaned field names
+ flat_schema["properties"] = cleaned_properties
+
+ # Also update required fields to match cleaned names
+ if flat_schema.get("required"):
+ cleaned_required = [field.replace("[0]", "") for field in flat_schema["required"]]
+ flat_schema["required"] = cleaned_required
+
+ input_schema = create_input_schema_from_json_schema(flat_schema)
+ if input_schema is None:
+ logger.warning(f"Input schema is None for action key: {action_key}")
+ return []
+
+ # Additional safety check before calling schema_to_langflow_inputs
+ if not hasattr(input_schema, "model_fields"):
+ logger.warning(f"Input schema for {action_key} does not have model_fields attribute")
+ return []
+
+ if input_schema.model_fields is None:
+ logger.warning(f"Input schema model_fields is None for {action_key}")
+ return []
+
+ result = schema_to_langflow_inputs(input_schema)
+
+ # Process inputs to handle attachment fields and set advanced status
+ if result:
+ processed_inputs = []
+ required_fields_set = set(flat_schema.get("required", []))
+
+ # Get file upload fields from stored action data
+ file_upload_fields = self._actions_data.get(action_key, {}).get("file_upload_fields", set())
+ if attachment_related_fields: # If we consolidated attachment fields
+ file_upload_fields = file_upload_fields | {"attachment"}
+
+ for inp in result:
+ if hasattr(inp, "name") and inp.name is not None:
+ # Check if this specific field is a file upload field
+ if inp.name.lower() in file_upload_fields or inp.name.lower() == "attachment":
+ # Replace with FileInput for file upload fields
+ file_input = FileInput(
+ name=inp.name,
+ display_name=getattr(inp, "display_name", inp.name.replace("_", " ").title()),
+ required=inp.name in required_fields_set,
+ advanced=inp.name not in required_fields_set,
+ info=getattr(inp, "info", "Upload file for this field"),
+ show=True,
+ file_types=[
+ "csv",
+ "txt",
+ "doc",
+ "docx",
+ "xls",
+ "xlsx",
+ "pdf",
+ "png",
+ "jpg",
+ "jpeg",
+ "gif",
+ "zip",
+ "rar",
+ "ppt",
+ "pptx",
+ ],
+ )
+ processed_inputs.append(file_input)
+ else:
+ # Ensure proper display_name and info are set for regular fields
+ if not hasattr(inp, "display_name") or not inp.display_name:
+ inp.display_name = inp.name.replace("_", " ").title()
+
+ # Preserve description from schema if available
+ field_schema = flat_schema.get("properties", {}).get(inp.name, {})
+ schema_description = field_schema.get("description")
+ current_info = getattr(inp, "info", None)
+
+ # Use schema description if available, otherwise keep current info or create from name
+ if schema_description:
+ inp.info = schema_description
+ elif not current_info:
+ # Fallback: create a basic description from the field name if no description exists
+ inp.info = f"{inp.name.replace('_', ' ').title()} field"
+
+ # Set advanced status for non-file-upload fields
+ if inp.name not in required_fields_set:
+ inp.advanced = True
+
+ # Skip entity_id being mapped to user_id parameter
+ if inp.name == "user_id" and getattr(self, "entity_id", None) == getattr(
+ inp, "value", None
+ ):
+ continue
+
+ processed_inputs.append(inp)
+ else:
+ processed_inputs.append(inp)
+
+ return processed_inputs
+ return result # noqa: TRY300
+ except ValueError as e:
+ logger.warning(f"Error generating inputs for {action_key}: {e}")
+ return []
+
+ def _get_inputs_for_all_actions(self) -> dict[str, list[InputTypes]]:
+ """Return a mapping action_key → list[InputTypes] for every action."""
+ result: dict[str, list[InputTypes]] = {}
+ for key in self._actions_data:
+ result[key] = self._validate_schema_inputs(key)
+ return result
+
+ def _remove_inputs_from_build_config(self, build_config: dict, keep_for_action: str) -> None:
+ """Remove parameter UI fields that belong to other actions."""
+ protected_keys = {"code", "entity_id", "api_key", "auth_link", "action_button", "tool_mode"}
+
+ for action_key, lf_inputs in self._get_inputs_for_all_actions().items():
+ if action_key == keep_for_action:
+ continue
+ for inp in lf_inputs:
+ if inp.name is not None and inp.name not in protected_keys:
+ build_config.pop(inp.name, None)
+
+ def _update_action_config(self, build_config: dict, selected_value: Any) -> None:
+ """Add or update parameter input fields for the chosen action."""
+ if not selected_value:
+ return
+
+ # The UI passes either a list with dict [{name: display_name}] OR the raw key
+ if isinstance(selected_value, list) and selected_value:
+ display_name = selected_value[0]["name"]
+ else:
+ display_name = selected_value
+
+ action_key = self.desanitize_action_name(display_name)
+
+ # Skip validation for default/placeholder values
+ if action_key in ("disabled", "placeholder", ""):
+ logger.debug(f"Skipping action config update for placeholder value: {action_key}")
+ return
+
+ lf_inputs = self._validate_schema_inputs(action_key)
+
+ # First remove inputs belonging to other actions
+ self._remove_inputs_from_build_config(build_config, action_key)
+
+ # Add / update the inputs for this action
+ for inp in lf_inputs:
+ if inp.name is not None:
+ inp_dict = inp.to_dict() if hasattr(inp, "to_dict") else inp.__dict__.copy()
+
+ # Ensure input_types is always a list
+ if not isinstance(inp_dict.get("input_types"), list):
+ inp_dict["input_types"] = []
+
+ inp_dict.setdefault("show", True) # visible once action selected
+ # Preserve previously entered value if user already filled something
+ if inp.name in build_config:
+ existing_val = build_config[inp.name].get("value")
+ inp_dict.setdefault("value", existing_val)
+ build_config[inp.name] = inp_dict
+
+ # Ensure _all_fields includes new ones
+ self._all_fields.update({i.name for i in lf_inputs if i.name is not None})
+
+ def _is_tool_mode_enabled(self) -> bool:
+ """Check if tool_mode is currently enabled."""
+ return getattr(self, "tool_mode", False)
+
+ def _set_action_visibility(self, build_config: dict, *, force_show: bool | None = None) -> None:
+ """Set action field visibility based on tool_mode state or forced value."""
+ if force_show is not None:
+ build_config["action_button"]["show"] = force_show
+ else:
+ # When tool_mode is enabled, hide action field
+ build_config["action_button"]["show"] = not self._is_tool_mode_enabled()
+
+ def create_new_auth_config(self, app_name: str) -> str:
+ """Create a new auth config for the given app name."""
+ composio = self._build_wrapper()
+ auth_config = composio.auth_configs.create(toolkit=app_name, options={"type": "use_composio_managed_auth"})
+ return auth_config.id
+
+ def _initiate_connection(self, app_name: str) -> tuple[str, str]:
+ """Initiate OAuth connection and return (redirect_url, connection_id)."""
+ try:
+ composio = self._build_wrapper()
+
+ auth_configs = composio.auth_configs.list(toolkit_slug=app_name)
+ if len(auth_configs.items) == 0:
+ auth_config_id = self.create_new_auth_config(app_name)
+ else:
+ auth_config_id = None
+ for auth_config in auth_configs.items:
+ if auth_config.auth_scheme == "OAUTH2":
+ auth_config_id = auth_config.id
+
+ auth_config_id = auth_configs.items[0].id
+
+ connection_request = composio.connected_accounts.initiate(
+ user_id=self.entity_id, auth_config_id=auth_config_id
+ )
+
+ redirect_url = getattr(connection_request, "redirect_url", None)
+ connection_id = getattr(connection_request, "id", None)
+
+ if not redirect_url or not redirect_url.startswith(("http://", "https://")):
+ msg = "Invalid redirect URL received from Composio"
+ raise ValueError(msg)
+
+ if not connection_id:
+ msg = "No connection ID received from Composio"
+ raise ValueError(msg)
+
+ logger.info(f"OAuth connection initiated for {app_name}: {redirect_url} (ID: {connection_id})")
+ return redirect_url, connection_id # noqa: TRY300
+
+ except Exception as e:
+ logger.error(f"Error initiating connection for {app_name}: {e}")
+ msg = f"Failed to initiate OAuth connection: {e}"
+ raise ValueError(msg) from e
+
+ def _check_connection_status_by_id(self, connection_id: str) -> str | None:
+ """Check status of a specific connection by ID. Returns status or None if not found."""
+ try:
+ composio = self._build_wrapper()
+ connection = composio.connected_accounts.get(nanoid=connection_id)
+ status = getattr(connection, "status", None)
+ logger.info(f"Connection {connection_id} status: {status}")
+ except (ValueError, ConnectionError) as e:
+ logger.error(f"Error checking connection {connection_id}: {e}")
+ return None
+ else:
+ return status
+
+ def _find_active_connection_for_app(self, app_name: str) -> tuple[str, str] | None:
+ """Find any ACTIVE connection for this app/user. Returns (connection_id, status) or None."""
+ try:
+ composio = self._build_wrapper()
+ connection_list = composio.connected_accounts.list(
+ user_ids=[self.entity_id], toolkit_slugs=[app_name.lower()]
+ )
+
+ if connection_list and hasattr(connection_list, "items") and connection_list.items:
+ for connection in connection_list.items:
+ connection_id = getattr(connection, "id", None)
+ connection_status = getattr(connection, "status", None)
+ if connection_status == "ACTIVE" and connection_id:
+ logger.info(f"Found existing ACTIVE connection for {app_name}: {connection_id}")
+ return connection_id, connection_status
+
+ except (ValueError, ConnectionError) as e:
+ logger.error(f"Error finding active connection for {app_name}: {e}")
+ return None
+ else:
+ return None
+
+ def _disconnect_specific_connection(self, connection_id: str) -> None:
+ """Disconnect a specific Composio connection by ID."""
+ try:
+ composio = self._build_wrapper()
+ composio.connected_accounts.delete(nanoid=connection_id)
+ logger.info(f"✅ Disconnected specific connection: {connection_id}")
+
+ except Exception as e:
+ logger.error(f"Error disconnecting connection {connection_id}: {e}")
+ msg = f"Failed to disconnect connection {connection_id}: {e}"
+ raise ValueError(msg) from e
+
  def update_build_config(self, build_config: dict, field_value: Any, field_name: str | None = None) -> dict:
- """Optimized build config updates."""
+ """Update build config for auth and action selection."""
+ # Clean any legacy None values that may still be present
+ for _fconfig in build_config.values():
+ if isinstance(_fconfig, dict) and _fconfig.get("input_types") is None:
+ _fconfig["input_types"] = []
+
+ # BULLETPROOF tool_mode checking - check all possible places where tool_mode could be stored
+ instance_tool_mode = getattr(self, "tool_mode", False) if hasattr(self, "tool_mode") else False
+
+ # Check build_config for tool_mode in multiple possible structures
+ build_config_tool_mode = False
+ if "tool_mode" in build_config:
+ tool_mode_config = build_config["tool_mode"]
+ if isinstance(tool_mode_config, dict):
+ build_config_tool_mode = tool_mode_config.get("value", False)
+ else:
+ build_config_tool_mode = bool(tool_mode_config)
+
+ # If this is a tool_mode change, update BOTH instance variable AND build_config
  if field_name == "tool_mode":
- build_config["action"]["show"] = not field_value
+ self.tool_mode = field_value
+ instance_tool_mode = field_value
+ # CRITICAL: Store tool_mode state in build_config so it persists
+ if "tool_mode" not in build_config:
+ build_config["tool_mode"] = {}
+ if isinstance(build_config["tool_mode"], dict):
+ build_config["tool_mode"]["value"] = field_value
+ build_config_tool_mode = field_value
+
+ # Current tool_mode is True if ANY source indicates it's enabled
+ current_tool_mode = instance_tool_mode or build_config_tool_mode or (field_name == "tool_mode" and field_value)
+
+ # CRITICAL: Ensure dynamic action metadata is available whenever we have an API key
+ # This must happen BEFORE any early returns to ensure tools are always loaded
+ api_key_available = hasattr(self, "api_key") and self.api_key
+
+ # Check if we need to populate actions - but also check cache availability
+ actions_available = bool(self._actions_data)
+ toolkit_slug = getattr(self, "app_name", "").lower()
+ cached_actions_available = toolkit_slug in self.__class__._actions_cache
+
+ should_populate = False
+
+ if (field_name == "api_key" and field_value) or (
+ api_key_available and not actions_available and not cached_actions_available
+ ):
+ should_populate = True
+ elif api_key_available and not actions_available and cached_actions_available:
+ self._populate_actions_data()
+
+ if should_populate:
+ logger.info(f"Populating actions data for {getattr(self, 'app_name', 'unknown')}...")
+ self._populate_actions_data()
+ logger.info(f"Actions populated: {len(self._actions_data)} actions found")
+
+ # CRITICAL: Set action options if we have actions (either from fresh population or cache)
+ if self._actions_data:
+ self._build_action_maps()
+ build_config["action_button"]["options"] = [
+ {"name": self.sanitize_action_name(action), "metadata": action} for action in self._actions_data
+ ]
+ logger.info(f"Action options set in build_config: {len(build_config['action_button']['options'])} options")
+ else:
+ build_config["action_button"]["options"] = []
+ logger.warning("No actions found, setting empty options")
+
+ # clear stored connection_id when api_key is changed
+ if field_name == "api_key" and field_value:
+ stored_connection_before = build_config.get("auth_link", {}).get("connection_id")
+ if "auth_link" in build_config and "connection_id" in build_config["auth_link"]:
+ build_config["auth_link"].pop("connection_id", None)
+ build_config["auth_link"]["value"] = "connect"
+ build_config["auth_link"]["auth_tooltip"] = "Connect"
+ logger.info(f"Cleared stored connection_id '{stored_connection_before}' due to API key change")
+ else:
+ logger.info("DEBUG: EARLY No stored connection_id to clear on API key change")
+
+ # Handle disconnect operations when tool mode is enabled
+ if field_name == "auth_link" and field_value == "disconnect":
+ try:
+ # Get the specific connection ID that's currently being used
+ stored_connection_id = build_config.get("auth_link", {}).get("connection_id")
+ if stored_connection_id:
+ self._disconnect_specific_connection(stored_connection_id)
+ else:
+ # No connection ID stored - nothing to disconnect
+ logger.warning("No connection ID found to disconnect")
+ build_config["auth_link"]["value"] = "connect"
+ build_config["auth_link"]["auth_tooltip"] = "Connect"
+ return build_config
+ except (ValueError, ConnectionError) as e:
+ logger.error(f"Error disconnecting: {e}")
+ build_config["auth_link"]["value"] = "error"
+ build_config["auth_link"]["auth_tooltip"] = f"Disconnect failed: {e!s}"
+ return build_config
+ else:
+ build_config["auth_link"]["value"] = "connect"
+ build_config["auth_link"]["auth_tooltip"] = "Connect"
+ build_config["auth_link"].pop("connection_id", None) # Clear stored connection ID
+ build_config["action_button"]["helper_text"] = "Please connect before selecting actions."
+ build_config["action_button"]["helper_text_metadata"] = {"variant": "destructive"}
+ return build_config
+
+ # Handle connection initiation when tool mode is enabled
+ if field_name == "auth_link" and isinstance(field_value, dict):
+ try:
+ toolkit_slug = self.app_name.lower()
+
+ # First check if we already have an ACTIVE connection
+ existing_active = self._find_active_connection_for_app(self.app_name)
+ if existing_active:
+ connection_id, _ = existing_active
+ build_config["auth_link"]["value"] = "validated"
+ build_config["auth_link"]["auth_tooltip"] = "Disconnect"
+ build_config["auth_link"]["connection_id"] = connection_id
+ build_config["action_button"]["helper_text"] = ""
+ build_config["action_button"]["helper_text_metadata"] = {}
+ logger.info(f"Using existing ACTIVE connection {connection_id} for {toolkit_slug}")
+ return build_config
+
+ # Check if we have a stored connection ID with INITIATED status
+ stored_connection_id = build_config.get("auth_link", {}).get("connection_id")
+ if stored_connection_id:
+ # Check status of existing connection
+ status = self._check_connection_status_by_id(stored_connection_id)
+ if status == "INITIATED":
+ # Get redirect URL from stored connection
+ try:
+ composio = self._build_wrapper()
+ connection = composio.connected_accounts.get(nanoid=stored_connection_id)
+ state = getattr(connection, "state", None)
+ if state and hasattr(state, "val"):
+ redirect_url = getattr(state.val, "redirect_url", None)
+ if redirect_url:
+ build_config["auth_link"]["value"] = redirect_url
+ logger.info(f"Reusing existing OAuth URL for {toolkit_slug}: {redirect_url}")
+ return build_config
+ except (AttributeError, ValueError, ConnectionError) as e:
+ logger.debug(f"Could not retrieve connection {stored_connection_id}: {e}")
+ # Continue to create new connection below
+
+ # Create new OAuth connection ONLY if we truly have no usable connection yet
+ if existing_active is None and not (stored_connection_id and status in ("ACTIVE", "INITIATED")):
+ try:
+ redirect_url, connection_id = self._initiate_connection(toolkit_slug)
+ build_config["auth_link"]["value"] = redirect_url
+ build_config["auth_link"]["connection_id"] = connection_id # Store connection ID
+ logger.info(f"New OAuth URL created for {toolkit_slug}: {redirect_url}")
+ except (ValueError, ConnectionError) as e:
+ logger.error(f"Error creating OAuth connection: {e}")
+ build_config["auth_link"]["value"] = "connect"
+ build_config["auth_link"]["auth_tooltip"] = f"Error: {e!s}"
+ else:
+ return build_config
+ else:
+ # We already have a usable connection; no new OAuth request
+ build_config["auth_link"]["auth_tooltip"] = "Disconnect"
+
+ except (ValueError, ConnectionError) as e:
+ logger.error(f"Error in connection initiation: {e}")
+ build_config["auth_link"]["value"] = "connect"
+ build_config["auth_link"]["auth_tooltip"] = f"Error: {e!s}"
+ build_config["action_button"]["helper_text"] = "Please connect before selecting actions."
+ build_config["action_button"]["helper_text_metadata"] = {"variant": "destructive"}
+ return build_config
+
+ # Check for ACTIVE connections and update status accordingly (tool mode)
+ if hasattr(self, "api_key") and self.api_key:
+ stored_connection_id = build_config.get("auth_link", {}).get("connection_id")
+ active_connection_id = None
+
+ # First try to check stored connection ID
+ if stored_connection_id:
+ status = self._check_connection_status_by_id(stored_connection_id)
+ if status == "ACTIVE":
+ active_connection_id = stored_connection_id
+
+ # If no stored connection or stored connection is not ACTIVE, find any ACTIVE connection
+ if not active_connection_id:
+ active_connection = self._find_active_connection_for_app(self.app_name)
+ if active_connection:
+ active_connection_id, _ = active_connection
+ # Store the found active connection ID for future use
+ if "auth_link" not in build_config:
+ build_config["auth_link"] = {}
+ build_config["auth_link"]["connection_id"] = active_connection_id
+
+ if active_connection_id:
+ # Show validated connection status
+ build_config["auth_link"]["value"] = "validated"
+ build_config["auth_link"]["auth_tooltip"] = "Disconnect"
+ build_config["action_button"]["helper_text"] = ""
+ build_config["action_button"]["helper_text_metadata"] = {}
+ else:
+ build_config["auth_link"]["value"] = "connect"
+ build_config["auth_link"]["auth_tooltip"] = "Connect"
+ build_config["action_button"]["helper_text"] = "Please connect before selecting actions."
+ build_config["action_button"]["helper_text_metadata"] = {"variant": "destructive"}
+
+ # CRITICAL: If tool_mode is enabled from ANY source, immediately hide action field and return
+ if current_tool_mode:
+ build_config["action_button"]["show"] = False
+
+ # CRITICAL: Hide ALL action parameter fields when tool mode is enabled
  for field in self._all_fields:
- build_config[field]["show"] = False
+ if field in build_config:
+ build_config[field]["show"] = False
+
+ # Also hide any other action-related fields that might be in build_config
+ for field_name_in_config in build_config: # noqa: PLC0206
+ # Skip base fields like api_key, tool_mode, action, etc.
+ if (
+ field_name_in_config not in ["api_key", "tool_mode", "action_button", "auth_link", "entity_id"]
+ and isinstance(build_config[field_name_in_config], dict)
+ and "show" in build_config[field_name_in_config]
+ ):
+ build_config[field_name_in_config]["show"] = False
+
+ # ENSURE tool_mode state is preserved in build_config for future calls
+ if "tool_mode" not in build_config:
+ build_config["tool_mode"] = {"value": True}
+ elif isinstance(build_config["tool_mode"], dict):
+ build_config["tool_mode"]["value"] = True
+ # Don't proceed with any other logic that might override this
+ return build_config
+
+ if field_name == "tool_mode":
+ if field_value is True:
+ build_config["action_button"]["show"] = False # Hide action field when tool mode is enabled
+ for field in self._all_fields:
+ build_config[field]["show"] = False # Update show status for all fields based on tool mode
+ elif field_value is False:
+ build_config["action_button"]["show"] = True # Show action field when tool mode is disabled
+ for field in self._all_fields:
+ build_config[field]["show"] = True # Update show status for all fields based on tool mode
  return build_config

- if field_name == "action":
+ if field_name == "action_button":
+ self._update_action_config(build_config, field_value)
+ # Keep the existing show/hide behaviour
  self.show_hide_fields(build_config, field_value)
- if build_config["auth_link"]["value"] == "validated":
- return build_config
+ return build_config
+
+ # Handle API key removal
  if field_name == "api_key" and len(field_value) == 0:
  build_config["auth_link"]["value"] = ""
  build_config["auth_link"]["auth_tooltip"] = "Please provide a valid Composio API Key."
- build_config["action"]["options"] = []
- build_config["action"]["helper_text"] = "Please connect before selecting actions."
- build_config["action"]["helper_text_metadata"] = {"variant": "destructive"}
+ build_config["action_button"]["options"] = []
+ build_config["action_button"]["helper_text"] = "Please connect before selecting actions."
+ build_config["action_button"]["helper_text_metadata"] = {"variant": "destructive"}
+ build_config["auth_link"].pop("connection_id", None)
  return build_config
+
+ # Only proceed with connection logic if we have an API key
  if not hasattr(self, "api_key") or not self.api_key:
  return build_config

- # Build the action maps before using them
+ # CRITICAL: If tool_mode is enabled (check both instance and build_config), skip all connection logic
+ if current_tool_mode:
+ build_config["action_button"]["show"] = False
+ return build_config
+
+ # Update action options only if tool_mode is disabled
  self._build_action_maps()
+ # Only set options if they haven't been set already during action population
+ if "options" not in build_config.get("action_button", {}) or not build_config["action_button"]["options"]:
+ build_config["action_button"]["options"] = [
+ {"name": self.sanitize_action_name(action), "metadata": action} for action in self._actions_data
+ ]
+ logger.debug("Setting action options from main logic path")
+ else:
+ logger.debug("Action options already set, skipping duplicate setting")
+ # Only set show=True if tool_mode is not enabled
+ if not current_tool_mode:
+ build_config["action_button"]["show"] = True

- # Update the action options
- build_config["action"]["options"] = [
- {
- "name": self.sanitize_action_name(action),
- "metadata": action,
- }
- for action in self._actions_data
- ]
+ stored_connection_id = build_config.get("auth_link", {}).get("connection_id")
+ active_connection_id = None

- try:
- toolset = self._build_wrapper()
- entity = toolset.client.get_entity(id=self.entity_id)
+ if stored_connection_id:
+ status = self._check_connection_status_by_id(stored_connection_id)
+ if status == "ACTIVE":
+ active_connection_id = stored_connection_id

- try:
- entity.get_connection(app=self.app_name)
- build_config["auth_link"]["value"] = "validated"
- build_config["auth_link"]["auth_tooltip"] = "Disconnect"
- build_config["action"]["helper_text"] = None
- build_config["action"]["helper_text_metadata"] = {}
- except NoItemsFound:
- auth_scheme = self._get_auth_scheme(self.app_name)
- if auth_scheme and auth_scheme.auth_mode == "OAUTH2":
- try:
- build_config["auth_link"]["value"] = self._initiate_default_connection(entity, self.app_name)
- build_config["auth_link"]["auth_tooltip"] = "Connect"
- except (ValueError, ConnectionError, ApiKeyError) as e:
- build_config["auth_link"]["value"] = "disabled"
- build_config["auth_link"]["auth_tooltip"] = f"Error: {e!s}"
213
- logger.error(f"Error checking auth status: {e}")
214
-
215
- except (ValueError, ConnectionError) as e:
216
- build_config["auth_link"]["value"] = "error"
217
- build_config["auth_link"]["auth_tooltip"] = f"Error: {e!s}"
218
- logger.error(f"Error checking auth status: {e}")
219
- except ApiKeyError as e:
220
- build_config["auth_link"]["value"] = ""
221
- build_config["auth_link"]["auth_tooltip"] = "Please provide a valid Composio API Key."
222
- build_config["action"]["options"] = []
223
- build_config["action"]["value"] = ""
224
- build_config["action"]["helper_text"] = "Please connect before selecting actions."
225
- build_config["action"]["helper_text_metadata"] = {"variant": "destructive"}
226
- logger.error(f"Error checking auth status: {e}")
1102
+ if not active_connection_id:
1103
+ active_connection = self._find_active_connection_for_app(self.app_name)
1104
+ if active_connection:
1105
+ active_connection_id, _ = active_connection
1106
+ if "auth_link" not in build_config:
1107
+ build_config["auth_link"] = {}
1108
+ build_config["auth_link"]["connection_id"] = active_connection_id
227
1109
 
228
- # Handle disconnection
229
- if field_name == "auth_link" and field_value == "disconnect":
230
- try:
231
- for field in self._all_fields:
232
- build_config[field]["show"] = False
233
- toolset = self._build_wrapper()
234
- entity = toolset.client.get_entity(id=self.entity_id)
235
- self.disconnect_connection(entity, self.app_name)
236
- build_config["auth_link"]["value"] = self._initiate_default_connection(entity, self.app_name)
1110
+ if active_connection_id:
1111
+ build_config["auth_link"]["value"] = "validated"
1112
+ build_config["auth_link"]["auth_tooltip"] = "Disconnect"
1113
+ build_config["action_button"]["helper_text"] = ""
1114
+ build_config["action_button"]["helper_text_metadata"] = {}
1115
+ elif stored_connection_id:
1116
+ status = self._check_connection_status_by_id(stored_connection_id)
1117
+ if status == "INITIATED":
1118
+ current_value = build_config.get("auth_link", {}).get("value")
1119
+ if not current_value or current_value == "connect":
1120
+ build_config["auth_link"]["value"] = "connect"
237
1121
  build_config["auth_link"]["auth_tooltip"] = "Connect"
238
- build_config["action"]["helper_text"] = "Please connect before selecting actions."
239
- build_config["action"]["helper_text_metadata"] = {
240
- "variant": "destructive",
241
- }
242
- build_config["action"]["options"] = []
243
- build_config["action"]["value"] = ""
244
- except (ValueError, ConnectionError, ApiKeyError) as e:
245
- build_config["auth_link"]["value"] = "error"
246
- build_config["auth_link"]["auth_tooltip"] = f"Failed to disconnect from the app: {e}"
247
- logger.error(f"Error disconnecting: {e}")
248
- if field_name == "auth_link" and field_value == "validated":
249
- build_config["action"]["helper_text"] = ""
250
- build_config["action"]["helper_text_metadata"] = {"icon": "Check", "variant": "success"}
1122
+ build_config["action_button"]["helper_text"] = "Please connect before selecting actions."
1123
+ build_config["action_button"]["helper_text_metadata"] = {"variant": "destructive"}
1124
+ else:
1125
+ # Connection not found or other status
1126
+ build_config["auth_link"]["value"] = "connect"
1127
+ build_config["auth_link"]["auth_tooltip"] = "Connect"
1128
+ build_config["action_button"]["helper_text"] = "Please connect before selecting actions."
1129
+ build_config["action_button"]["helper_text_metadata"] = {"variant": "destructive"}
1130
+ else:
1131
+ build_config["auth_link"]["value"] = "connect"
1132
+ build_config["auth_link"]["auth_tooltip"] = "Connect"
1133
+ build_config["action_button"]["helper_text"] = "Please connect before selecting actions."
1134
+ build_config["action_button"]["helper_text_metadata"] = {"variant": "destructive"}
251
1135
 
252
- return build_config
1136
+ if self._is_tool_mode_enabled():
1137
+ build_config["action_button"]["show"] = False
253
1138
 
254
- def _get_auth_scheme(self, app_name: str) -> AppAuthScheme:
255
- """Get the primary auth scheme for an app."""
256
- toolset = self._build_wrapper()
257
- try:
258
- return toolset.get_auth_scheme_for_app(app=app_name.lower())
259
- except (ValueError, ConnectionError, NoItemsFound):
260
- logger.exception(f"Error getting auth scheme for {app_name}")
261
- return None
262
-
263
- def _initiate_default_connection(self, entity: Any, app: str) -> str:
264
- connection = entity.initiate_connection(app_name=app, use_composio_auth=True, force_new_integration=True)
265
- return connection.redirectUrl
1139
+ return build_config
266
1140
 
267
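Note on the rewritten connection handling above: instead of calling entity.get_connection() and catching NoItemsFound, update_build_config now resolves the connection in two steps. It first re-validates the connection_id cached under build_config["auth_link"], and only if that id is missing or no longer ACTIVE does it scan for any ACTIVE connection for the app and cache whatever it finds. A minimal standalone sketch of that resolution order (check_status and find_active are hypothetical stand-ins for the component's _check_connection_status_by_id and _find_active_connection_for_app helpers, whose bodies are outside this hunk):

    from typing import Callable, Optional, Tuple

    def resolve_active_connection(
        build_config: dict,
        check_status: Callable[[str], str],                  # stand-in for _check_connection_status_by_id
        find_active: Callable[[], Optional[Tuple[str, str]]],  # stand-in for _find_active_connection_for_app
    ) -> Optional[str]:
        """Return an ACTIVE connection id, preferring the id already cached in build_config."""
        stored_id = build_config.get("auth_link", {}).get("connection_id")
        active_id = None

        # Step 1: trust the cached id only if it is still ACTIVE.
        if stored_id and check_status(stored_id) == "ACTIVE":
            active_id = stored_id

        # Step 2: otherwise scan for any ACTIVE connection and cache it for future calls.
        if not active_id:
            found = find_active()
            if found:
                active_id, _ = found
                build_config.setdefault("auth_link", {})["connection_id"] = active_id

        return active_id

    # Stale cached id: the scan result wins and replaces the cached value.
    cfg = {"auth_link": {"connection_id": "old-123"}}
    assert resolve_active_connection(cfg, lambda _id: "EXPIRED", lambda: ("new-456", "ACTIVE")) == "new-456"
    assert cfg["auth_link"]["connection_id"] == "new-456"

Caching the resolved id back into build_config is what lets later calls skip the scan while the stored connection stays ACTIVE.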
-    def disconnect_connection(self, entity: Any, app: str) -> None:
-        """Disconnect a Composio connection."""
-        try:
-            # Get the connection first
-            connection = entity.get_connection(app=app)
-            # Delete the connection using the integrations collection
-            entity.client.integrations.remove(id=connection.integrationId)
-        except Exception as e:
-            logger.error(f"Error disconnecting from {app}: {e}")
-            msg = f"Failed to disconnect from {app}: {e}"
-            raise ValueError(msg) from e
+    def configure_tools(self, composio: Composio, limit: int | None = None) -> list[Tool]:
+        if limit is None:
+            limit = 999
 
-    def configure_tools(self, toolset: ComposioToolSet) -> list[Tool]:
-        tools = toolset.get_tools(actions=self._actions_data.keys())
-        logger.info(f"Tools: {tools}")
+        tools = composio.tools.get(user_id=self.entity_id, toolkits=[self.app_name.lower()], limit=limit)
         configured_tools = []
         for tool in tools:
             # Set the sanitized name
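The configure_tools change above is the visible edge of the SDK migration in this file: the old ComposioToolSet.get_tools(actions=...) call gives way to the new Composio client's tools.get, which is scoped by user id and toolkit and capped by limit (999 when the caller passes None). A hedged usage sketch; the Composio(api_key=...) construction is an assumption here, since in the component the client comes from self._build_wrapper(), which is not part of this hunk:

    from composio import Composio

    # Assumed construction; the component hides this behind self._build_wrapper().
    composio = Composio(api_key="your-composio-api-key")

    # Mirrors the call in configure_tools: one toolkit, one user/entity, explicit cap.
    tools = composio.tools.get(
        user_id="default",    # the component passes self.entity_id
        toolkits=["gmail"],   # the component passes [self.app_name.lower()]
        limit=999,            # configure_tools substitutes 999 when limit is None
    )
    for tool in tools:
        # configure_tools renames each tool to its sanitized display name before
        # appending it to configured_tools (see the loop above).
        pass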
@@ -293,20 +1157,122 @@ class ComposioBaseComponent(Component):
 
     async def _get_tools(self) -> list[Tool]:
         """Get tools with cached results and optimized name sanitization."""
-        toolset = self._build_wrapper()
+        composio = self._build_wrapper()
         self.set_default_tools()
-        return self.configure_tools(toolset)
+        return self.configure_tools(composio)
 
     @property
     def enabled_tools(self):
-        if not hasattr(self, "action") or not self.action or not isinstance(self.action, list):
+        """Return tag names for actions of this app that should be exposed to the agent.
+
+        If default tools are set via set_default_tools(), returns those.
+        Otherwise, returns only the first few tools (limited by default_tools_limit)
+        to prevent overwhelming the agent. Subclasses can override this behavior.
+
+        """
+        if not self._actions_data:
+            self._populate_actions_data()
+
+        if hasattr(self, "_default_tools") and self._default_tools:
             return list(self._default_tools)
-        return list(self._default_tools.union(action["name"].replace(" ", "-") for action in self.action))
 
-    @abstractmethod
-    def execute_action(self) -> list[dict]:
-        """Execute action and return response as Message."""
+        all_tools = list(self._actions_data.keys())
+        limit = getattr(self, "default_tools_limit", 5)
+        return all_tools[:limit]
+
+    def execute_action(self):
+        """Execute the selected Composio tool."""
+        composio = self._build_wrapper()
+        self._populate_actions_data()
+        self._build_action_maps()
+
+        display_name = (
+            self.action_button[0]["name"]
+            if isinstance(getattr(self, "action_button", None), list) and self.action_button
+            else self.action_button
+        )
+        action_key = self._display_to_key_map.get(display_name)
+
+        if not action_key:
+            msg = f"Invalid action: {display_name}"
+            raise ValueError(msg)
+
+        try:
+            arguments: dict[str, Any] = {}
+            param_fields = self._actions_data.get(action_key, {}).get("action_fields", [])
+
+            schema_dict = self._action_schemas.get(action_key, {})
+            parameters_schema = schema_dict.get("input_parameters", {})
+            schema_properties = parameters_schema.get("properties", {}) if parameters_schema else {}
+            # Handle case where 'required' field is None (causes "'NoneType' object is not iterable")
+            required_list = parameters_schema.get("required", []) if parameters_schema else []
+            required_fields = set(required_list) if required_list is not None else set()
+
+            for field in param_fields:
+                if not hasattr(self, field):
+                    continue
+                value = getattr(self, field)
+
+                # Skip None, empty strings, and empty lists
+                if value is None or value == "" or (isinstance(value, list) and len(value) == 0):
+                    continue
+
+                # For optional fields, be more strict about including them
+                # Only include if the user has explicitly provided a meaningful value
+                if field not in required_fields:
+                    # Get the default value from the schema
+                    field_schema = schema_properties.get(field, {})
+                    schema_default = field_schema.get("default")
+
+                    # Skip if the current value matches the schema default
+                    if value == schema_default:
+                        continue
+
+                # Convert comma-separated to list for array parameters (heuristic)
+                prop_schema = schema_properties.get(field, {})
+                if prop_schema.get("type") == "array" and isinstance(value, str):
+                    value = [item.strip() for item in value.split(",")]
+
+                if field in self._bool_variables:
+                    value = bool(value)
+
+                # Handle renamed fields - map back to original names for API execution
+                final_field_name = field
+                if field.endswith("_user_id") and field.startswith(self.app_name):
+                    final_field_name = "user_id"
+
+                arguments[final_field_name] = value
+
+            # Execute using new SDK
+            result = composio.tools.execute(
+                slug=action_key,
+                arguments=arguments,
+                user_id=self.entity_id,
+            )
+
+            if isinstance(result, dict) and "successful" in result:
+                if result["successful"]:
+                    raw_data = result.get("data", result)
+                    return self._apply_post_processor(action_key, raw_data)
+                error_msg = result.get("error", "Tool execution failed")
+                raise ValueError(error_msg)
+
+        except ValueError as e:
+            logger.error(f"Failed to execute {action_key}: {e}")
+            raise
+
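The argument-building loop in execute_action applies four filtering rules before calling composio.tools.execute: unset or empty values are skipped, optional values equal to the schema default are dropped, comma-separated strings are split into lists for array-typed parameters, and fields renamed to <app>_user_id are mapped back to user_id. A simplified, standalone restatement of those rules (build_arguments and the sample Gmail-style schema are illustrative only; the component's attribute lookups and its _bool_variables coercion are omitted):

    from typing import Any

    def build_arguments(app_name: str, values: dict[str, Any], schema: dict[str, Any]) -> dict[str, Any]:
        """Simplified restatement of the argument filtering done in execute_action."""
        props = schema.get("properties", {})
        required = set(schema.get("required") or [])  # "required" may be None in some schemas
        arguments: dict[str, Any] = {}

        for field, value in values.items():
            # Skip unset values: None, empty strings, empty lists.
            if value is None or value == "" or (isinstance(value, list) and not value):
                continue

            prop = props.get(field, {})

            # Optional fields are only sent when they differ from the schema default.
            if field not in required and value == prop.get("default"):
                continue

            # Heuristic: comma-separated strings become lists for array-typed parameters.
            if prop.get("type") == "array" and isinstance(value, str):
                value = [item.strip() for item in value.split(",")]

            # Fields renamed to avoid clashes (e.g. "<app>_user_id") map back to "user_id".
            key = "user_id" if field.startswith(app_name) and field.endswith("_user_id") else field
            arguments[key] = value

        return arguments

    schema = {
        "properties": {
            "labels": {"type": "array"},
            "max_results": {"type": "integer", "default": 10},
            "gmail_user_id": {"type": "string"},
        },
        "required": ["gmail_user_id"],
    }
    values = {"labels": "INBOX, UNREAD", "max_results": 10, "gmail_user_id": "me"}
    print(build_arguments("gmail", values, schema))
    # {'labels': ['INBOX', 'UNREAD'], 'user_id': 'me'}  -- max_results dropped (equals default)

Dropping optional values that still equal their schema default keeps untouched UI fields from overriding server-side defaults.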
+    def _apply_post_processor(self, action_key: str, raw_data: Any) -> Any:
+        """Apply post-processor for the given action if defined."""
+        if hasattr(self, "post_processors") and isinstance(self.post_processors, dict):
+            processor_func = self.post_processors.get(action_key)
+            if processor_func and callable(processor_func):
+                try:
+                    return processor_func(raw_data)
+                except (TypeError, ValueError, KeyError) as e:
+                    logger.error(f"Error in post-processor for {action_key}: {e} (Exception type: {type(e).__name__})")
+                    return raw_data
+
+        return raw_data
 
-    @abstractmethod
     def set_default_tools(self):
         """Set the default tools."""