lfx-nightly 0.2.0.dev0__py3-none-any.whl → 0.2.0.dev41__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (196) hide show
  1. lfx/_assets/component_index.json +1 -1
  2. lfx/base/agents/agent.py +21 -4
  3. lfx/base/agents/altk_base_agent.py +393 -0
  4. lfx/base/agents/altk_tool_wrappers.py +565 -0
  5. lfx/base/agents/events.py +2 -1
  6. lfx/base/composio/composio_base.py +159 -224
  7. lfx/base/data/base_file.py +97 -20
  8. lfx/base/data/docling_utils.py +61 -10
  9. lfx/base/data/storage_utils.py +301 -0
  10. lfx/base/data/utils.py +178 -14
  11. lfx/base/mcp/util.py +2 -2
  12. lfx/base/models/anthropic_constants.py +21 -12
  13. lfx/base/models/groq_constants.py +74 -58
  14. lfx/base/models/groq_model_discovery.py +265 -0
  15. lfx/base/models/model.py +1 -1
  16. lfx/base/models/model_utils.py +100 -0
  17. lfx/base/models/openai_constants.py +7 -0
  18. lfx/base/models/watsonx_constants.py +32 -8
  19. lfx/base/tools/run_flow.py +601 -129
  20. lfx/cli/commands.py +9 -4
  21. lfx/cli/common.py +2 -2
  22. lfx/cli/run.py +1 -1
  23. lfx/cli/script_loader.py +53 -11
  24. lfx/components/Notion/create_page.py +1 -1
  25. lfx/components/Notion/list_database_properties.py +1 -1
  26. lfx/components/Notion/list_pages.py +1 -1
  27. lfx/components/Notion/list_users.py +1 -1
  28. lfx/components/Notion/page_content_viewer.py +1 -1
  29. lfx/components/Notion/search.py +1 -1
  30. lfx/components/Notion/update_page_property.py +1 -1
  31. lfx/components/__init__.py +19 -5
  32. lfx/components/{agents → altk}/__init__.py +5 -9
  33. lfx/components/altk/altk_agent.py +193 -0
  34. lfx/components/apify/apify_actor.py +1 -1
  35. lfx/components/composio/__init__.py +70 -18
  36. lfx/components/composio/apollo_composio.py +11 -0
  37. lfx/components/composio/bitbucket_composio.py +11 -0
  38. lfx/components/composio/canva_composio.py +11 -0
  39. lfx/components/composio/coda_composio.py +11 -0
  40. lfx/components/composio/composio_api.py +10 -0
  41. lfx/components/composio/discord_composio.py +1 -1
  42. lfx/components/composio/elevenlabs_composio.py +11 -0
  43. lfx/components/composio/exa_composio.py +11 -0
  44. lfx/components/composio/firecrawl_composio.py +11 -0
  45. lfx/components/composio/fireflies_composio.py +11 -0
  46. lfx/components/composio/gmail_composio.py +1 -1
  47. lfx/components/composio/googlebigquery_composio.py +11 -0
  48. lfx/components/composio/googlecalendar_composio.py +1 -1
  49. lfx/components/composio/googledocs_composio.py +1 -1
  50. lfx/components/composio/googlemeet_composio.py +1 -1
  51. lfx/components/composio/googlesheets_composio.py +1 -1
  52. lfx/components/composio/googletasks_composio.py +1 -1
  53. lfx/components/composio/heygen_composio.py +11 -0
  54. lfx/components/composio/mem0_composio.py +11 -0
  55. lfx/components/composio/peopledatalabs_composio.py +11 -0
  56. lfx/components/composio/perplexityai_composio.py +11 -0
  57. lfx/components/composio/serpapi_composio.py +11 -0
  58. lfx/components/composio/slack_composio.py +3 -574
  59. lfx/components/composio/slackbot_composio.py +1 -1
  60. lfx/components/composio/snowflake_composio.py +11 -0
  61. lfx/components/composio/tavily_composio.py +11 -0
  62. lfx/components/composio/youtube_composio.py +2 -2
  63. lfx/components/cuga/__init__.py +34 -0
  64. lfx/components/cuga/cuga_agent.py +730 -0
  65. lfx/components/data/__init__.py +78 -28
  66. lfx/components/data_source/__init__.py +58 -0
  67. lfx/components/{data → data_source}/api_request.py +26 -3
  68. lfx/components/{data → data_source}/csv_to_data.py +15 -10
  69. lfx/components/{data → data_source}/json_to_data.py +15 -8
  70. lfx/components/{data → data_source}/news_search.py +1 -1
  71. lfx/components/{data → data_source}/rss.py +1 -1
  72. lfx/components/{data → data_source}/sql_executor.py +1 -1
  73. lfx/components/{data → data_source}/url.py +1 -1
  74. lfx/components/{data → data_source}/web_search.py +1 -1
  75. lfx/components/datastax/astradb_cql.py +1 -1
  76. lfx/components/datastax/astradb_graph.py +1 -1
  77. lfx/components/datastax/astradb_tool.py +1 -1
  78. lfx/components/datastax/astradb_vectorstore.py +1 -1
  79. lfx/components/datastax/hcd.py +1 -1
  80. lfx/components/deactivated/json_document_builder.py +1 -1
  81. lfx/components/docling/__init__.py +0 -3
  82. lfx/components/docling/chunk_docling_document.py +3 -1
  83. lfx/components/docling/export_docling_document.py +3 -1
  84. lfx/components/elastic/elasticsearch.py +1 -1
  85. lfx/components/files_and_knowledge/__init__.py +47 -0
  86. lfx/components/{data → files_and_knowledge}/directory.py +1 -1
  87. lfx/components/{data → files_and_knowledge}/file.py +304 -24
  88. lfx/components/{knowledge_bases → files_and_knowledge}/retrieval.py +2 -2
  89. lfx/components/{data → files_and_knowledge}/save_file.py +218 -31
  90. lfx/components/flow_controls/__init__.py +58 -0
  91. lfx/components/{logic → flow_controls}/conditional_router.py +1 -1
  92. lfx/components/{logic → flow_controls}/loop.py +43 -9
  93. lfx/components/flow_controls/run_flow.py +108 -0
  94. lfx/components/glean/glean_search_api.py +1 -1
  95. lfx/components/groq/groq.py +35 -28
  96. lfx/components/helpers/__init__.py +102 -0
  97. lfx/components/ibm/watsonx.py +7 -1
  98. lfx/components/input_output/__init__.py +3 -1
  99. lfx/components/input_output/chat.py +4 -3
  100. lfx/components/input_output/chat_output.py +10 -4
  101. lfx/components/input_output/text.py +1 -1
  102. lfx/components/input_output/text_output.py +1 -1
  103. lfx/components/{data → input_output}/webhook.py +1 -1
  104. lfx/components/knowledge_bases/__init__.py +59 -4
  105. lfx/components/langchain_utilities/character.py +1 -1
  106. lfx/components/langchain_utilities/csv_agent.py +84 -16
  107. lfx/components/langchain_utilities/json_agent.py +67 -12
  108. lfx/components/langchain_utilities/language_recursive.py +1 -1
  109. lfx/components/llm_operations/__init__.py +46 -0
  110. lfx/components/{processing → llm_operations}/batch_run.py +17 -8
  111. lfx/components/{processing → llm_operations}/lambda_filter.py +1 -1
  112. lfx/components/{logic → llm_operations}/llm_conditional_router.py +1 -1
  113. lfx/components/{processing/llm_router.py → llm_operations/llm_selector.py} +3 -3
  114. lfx/components/{processing → llm_operations}/structured_output.py +1 -1
  115. lfx/components/logic/__init__.py +126 -0
  116. lfx/components/mem0/mem0_chat_memory.py +11 -0
  117. lfx/components/models/__init__.py +64 -9
  118. lfx/components/models_and_agents/__init__.py +49 -0
  119. lfx/components/{agents → models_and_agents}/agent.py +6 -4
  120. lfx/components/models_and_agents/embedding_model.py +353 -0
  121. lfx/components/models_and_agents/language_model.py +398 -0
  122. lfx/components/{agents → models_and_agents}/mcp_component.py +53 -44
  123. lfx/components/{helpers → models_and_agents}/memory.py +1 -1
  124. lfx/components/nvidia/system_assist.py +1 -1
  125. lfx/components/olivya/olivya.py +1 -1
  126. lfx/components/ollama/ollama.py +24 -5
  127. lfx/components/processing/__init__.py +9 -60
  128. lfx/components/processing/converter.py +1 -1
  129. lfx/components/processing/dataframe_operations.py +1 -1
  130. lfx/components/processing/parse_json_data.py +2 -2
  131. lfx/components/processing/parser.py +1 -1
  132. lfx/components/processing/split_text.py +1 -1
  133. lfx/components/qdrant/qdrant.py +1 -1
  134. lfx/components/redis/redis.py +1 -1
  135. lfx/components/twelvelabs/split_video.py +10 -0
  136. lfx/components/twelvelabs/video_file.py +12 -0
  137. lfx/components/utilities/__init__.py +43 -0
  138. lfx/components/{helpers → utilities}/calculator_core.py +1 -1
  139. lfx/components/{helpers → utilities}/current_date.py +1 -1
  140. lfx/components/{processing → utilities}/python_repl_core.py +1 -1
  141. lfx/components/vectorstores/local_db.py +9 -0
  142. lfx/components/youtube/youtube_transcripts.py +118 -30
  143. lfx/custom/custom_component/component.py +57 -1
  144. lfx/custom/custom_component/custom_component.py +68 -6
  145. lfx/custom/directory_reader/directory_reader.py +5 -2
  146. lfx/graph/edge/base.py +43 -20
  147. lfx/graph/state/model.py +15 -2
  148. lfx/graph/utils.py +6 -0
  149. lfx/graph/vertex/param_handler.py +10 -7
  150. lfx/helpers/__init__.py +12 -0
  151. lfx/helpers/flow.py +117 -0
  152. lfx/inputs/input_mixin.py +24 -1
  153. lfx/inputs/inputs.py +13 -1
  154. lfx/interface/components.py +161 -83
  155. lfx/log/logger.py +5 -3
  156. lfx/schema/image.py +2 -12
  157. lfx/services/database/__init__.py +5 -0
  158. lfx/services/database/service.py +25 -0
  159. lfx/services/deps.py +87 -22
  160. lfx/services/interfaces.py +5 -0
  161. lfx/services/manager.py +24 -10
  162. lfx/services/mcp_composer/service.py +1029 -162
  163. lfx/services/session.py +5 -0
  164. lfx/services/settings/auth.py +18 -11
  165. lfx/services/settings/base.py +56 -30
  166. lfx/services/settings/constants.py +8 -0
  167. lfx/services/storage/local.py +108 -46
  168. lfx/services/storage/service.py +171 -29
  169. lfx/template/field/base.py +3 -0
  170. lfx/utils/image.py +29 -11
  171. lfx/utils/ssrf_protection.py +384 -0
  172. lfx/utils/validate_cloud.py +26 -0
  173. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev41.dist-info}/METADATA +38 -22
  174. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev41.dist-info}/RECORD +189 -160
  175. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev41.dist-info}/WHEEL +1 -1
  176. lfx/components/agents/altk_agent.py +0 -366
  177. lfx/components/agents/cuga_agent.py +0 -1013
  178. lfx/components/docling/docling_remote_vlm.py +0 -284
  179. lfx/components/logic/run_flow.py +0 -71
  180. lfx/components/models/embedding_model.py +0 -195
  181. lfx/components/models/language_model.py +0 -144
  182. lfx/components/processing/dataframe_to_toolset.py +0 -259
  183. /lfx/components/{data → data_source}/mock_data.py +0 -0
  184. /lfx/components/{knowledge_bases → files_and_knowledge}/ingestion.py +0 -0
  185. /lfx/components/{logic → flow_controls}/data_conditional_router.py +0 -0
  186. /lfx/components/{logic → flow_controls}/flow_tool.py +0 -0
  187. /lfx/components/{logic → flow_controls}/listen.py +0 -0
  188. /lfx/components/{logic → flow_controls}/notify.py +0 -0
  189. /lfx/components/{logic → flow_controls}/pass_message.py +0 -0
  190. /lfx/components/{logic → flow_controls}/sub_flow.py +0 -0
  191. /lfx/components/{processing → models_and_agents}/prompt.py +0 -0
  192. /lfx/components/{helpers → processing}/create_list.py +0 -0
  193. /lfx/components/{helpers → processing}/output_parser.py +0 -0
  194. /lfx/components/{helpers → processing}/store_message.py +0 -0
  195. /lfx/components/{helpers → utilities}/id_generator.py +0 -0
  196. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev41.dist-info}/entry_points.txt +0 -0
@@ -36,6 +36,7 @@ from lfx.schema.data import Data
36
36
  from lfx.schema.log import Log
37
37
  from lfx.schema.message import ErrorMessage, Message
38
38
  from lfx.schema.properties import Source
39
+ from lfx.serialization.serialization import serialize
39
40
  from lfx.template.field.base import UNDEFINED, Input, Output
40
41
  from lfx.template.frontend_node.custom_components import ComponentFrontendNode
41
42
  from lfx.utils.async_helpers import run_until_complete
@@ -93,6 +94,20 @@ class PlaceholderGraph(NamedTuple):
93
94
  context: dict
94
95
  flow_name: str | None
95
96
 
97
+ def get_vertex_neighbors(self, _vertex) -> dict:
98
+ """Returns an empty dictionary since PlaceholderGraph has no edges or neighbors.
99
+
100
+ This method exists for compatibility with real Graph objects, allowing components
101
+ to check graph connectivity even when running in isolation (e.g., in tests).
102
+
103
+ Args:
104
+ _vertex: The vertex to check neighbors for (ignored in placeholder context).
105
+
106
+ Returns:
107
+ An empty dictionary, indicating no neighbors exist.
108
+ """
109
+ return {}
110
+
96
111
 
97
112
  class Component(CustomComponent):
98
113
  inputs: list[InputTypes] = []
@@ -121,6 +136,7 @@ class Component(CustomComponent):
121
136
  self._components: list[Component] = []
122
137
  self._event_manager: EventManager | None = None
123
138
  self._state_model = None
139
+ self._telemetry_input_values: dict[str, Any] | None = None
124
140
 
125
141
  # Process input kwargs
126
142
  inputs = {}
@@ -528,6 +544,8 @@ class Component(CustomComponent):
528
544
  ValueError: If the input name is None.
529
545
 
530
546
  """
547
+ telemetry_values = {}
548
+
531
549
  for input_ in inputs:
532
550
  if input_.name is None:
533
551
  msg = self.build_component_error_message("Input name cannot be None")
@@ -537,6 +555,28 @@ class Component(CustomComponent):
537
555
  except TypeError:
538
556
  self._inputs[input_.name] = input_
539
557
 
558
+ # Build telemetry data during existing iteration (no performance impact)
559
+ if self._should_track_input(input_):
560
+ telemetry_values[input_.name] = serialize(input_.value)
561
+
562
+ # Cache for later O(1) retrieval
563
+ self._telemetry_input_values = telemetry_values if telemetry_values else None
564
+
565
+ def _should_track_input(self, input_obj: InputTypes) -> bool:
566
+ """Check if input should be tracked in telemetry."""
567
+ from lfx.inputs.input_mixin import SENSITIVE_FIELD_TYPES
568
+
569
+ # Respect opt-in flag (default: False for privacy)
570
+ if not getattr(input_obj, "track_in_telemetry", False):
571
+ return False
572
+ # Auto-exclude sensitive field types
573
+ return not (hasattr(input_obj, "field_type") and input_obj.field_type in SENSITIVE_FIELD_TYPES)
574
+
575
+ def get_telemetry_input_values(self) -> dict[str, Any] | None:
576
+ """Get cached telemetry input values. O(1) lookup, no iteration."""
577
+ # Return all values including descriptive strings and None
578
+ return self._telemetry_input_values if self._telemetry_input_values else None
579
+
540
580
  def validate(self, params: dict) -> None:
541
581
  """Validates the component parameters.
542
582
 
@@ -551,7 +591,10 @@ class Component(CustomComponent):
551
591
  self._validate_outputs()
552
592
 
553
593
  async def run_and_validate_update_outputs(self, frontend_node: dict, field_name: str, field_value: Any):
554
- frontend_node = self.update_outputs(frontend_node, field_name, field_value)
594
+ if inspect.iscoroutinefunction(self.update_outputs):
595
+ frontend_node = await self.update_outputs(frontend_node, field_name, field_value)
596
+ else:
597
+ frontend_node = self.update_outputs(frontend_node, field_name, field_value)
555
598
  if field_name == "tool_mode" or frontend_node.get("tool_mode"):
556
599
  is_tool_mode = field_value or frontend_node.get("tool_mode")
557
600
  frontend_node["outputs"] = [self._build_tool_output()] if is_tool_mode else frontend_node["outputs"]
@@ -1513,6 +1556,14 @@ class Component(CustomComponent):
1513
1556
 
1514
1557
  return has_chat_output(self.graph.get_vertex_neighbors(self._vertex))
1515
1558
 
1559
+ def is_connected_to_chat_input(self) -> bool:
1560
+ # Lazy import to avoid circular dependency
1561
+ from lfx.graph.utils import has_chat_input
1562
+
1563
+ if self.graph is None:
1564
+ return False
1565
+ return has_chat_input(self.graph.get_vertex_neighbors(self._vertex))
1566
+
1516
1567
  def _should_skip_message(self, message: Message) -> bool:
1517
1568
  """Check if the message should be skipped based on vertex configuration and message type."""
1518
1569
  return (
@@ -1591,7 +1642,12 @@ class Component(CustomComponent):
1591
1642
  ):
1592
1643
  complete_message = await self._stream_message(message.text, stored_message)
1593
1644
  stored_message.text = complete_message
1645
+ if complete_message:
1646
+ stored_message.properties.state = "complete"
1594
1647
  stored_message = await self._update_stored_message(stored_message)
1648
+ # Note: We intentionally do NOT send a message event here with state="complete"
1649
+ # The frontend already has all the content from streaming tokens
1650
+ # Only the database is updated with the complete state
1595
1651
  else:
1596
1652
  # Only send message event for non-streaming messages
1597
1653
  await self._send_message_event(stored_message, id_=id_)
@@ -12,7 +12,14 @@ from pydantic import BaseModel
12
12
 
13
13
  from lfx.custom import validate
14
14
  from lfx.custom.custom_component.base_component import BaseComponent
15
- from lfx.helpers import list_flows, load_flow, run_flow
15
+ from lfx.helpers import (
16
+ get_flow_by_id_or_name,
17
+ list_flows,
18
+ list_flows_by_flow_folder,
19
+ list_flows_by_folder_id,
20
+ load_flow,
21
+ run_flow,
22
+ )
16
23
  from lfx.log.logger import logger
17
24
  from lfx.schema.data import Data
18
25
  from lfx.services.deps import get_storage_service, get_variable_service, session_scope
@@ -97,7 +104,6 @@ class CustomComponent(BaseComponent):
97
104
  self.cache: TTLCache = TTLCache(maxsize=1024, ttl=60)
98
105
  self._results: dict = {}
99
106
  self._artifacts: dict = {}
100
-
101
107
  # Call parent's init after setting up our attributes
102
108
  super().__init__(**data)
103
109
 
@@ -548,15 +554,51 @@ class CustomComponent(BaseComponent):
548
554
  return run_until_complete(self.alist_flows())
549
555
 
550
556
  async def alist_flows(self) -> list[Data]:
551
- if not self.user_id:
552
- msg = "Session is invalid"
553
- raise ValueError(msg)
554
- try:
557
+ """List all flows for the current user."""
558
+ try: # user id is validated in the function
555
559
  return await list_flows(user_id=str(self.user_id))
556
560
  except Exception as e:
557
561
  msg = f"Error listing flows: {e}"
558
562
  raise ValueError(msg) from e
559
563
 
564
+ async def alist_flows_by_flow_folder(self) -> list[Data]:
565
+ """List all flows for the current user in the same folder as the current flow."""
566
+ flow_id = self._get_runtime_or_frontend_node_attr("flow_id")
567
+ if flow_id is not None:
568
+ try: # user and flow ids are validated in the function
569
+ return await list_flows_by_flow_folder(user_id=str(self.user_id), flow_id=str(flow_id))
570
+ except Exception as e:
571
+ msg = f"Error listing flows: {e}"
572
+ raise ValueError(msg) from e
573
+ return []
574
+
575
+ async def alist_flows_by_folder_id(self) -> list[Data]:
576
+ """List all flows for the current user in the folder identified by folder_id."""
577
+ folder_id = self._get_runtime_or_frontend_node_attr("folder_id")
578
+ if folder_id is not None:
579
+ try: # user and folder ids are validated in the function
580
+ return await list_flows_by_folder_id(
581
+ user_id=str(self.user_id),
582
+ folder_id=str(folder_id),
583
+ )
584
+ except Exception as e:
585
+ msg = f"Error listing flows: {e}"
586
+ raise ValueError(msg) from e
587
+ return []
588
+
589
+ async def aget_flow_by_id_or_name(self) -> Data | None:
590
+ flow_id = self._get_runtime_or_frontend_node_attr("flow_id")
591
+ flow_name = self._get_runtime_or_frontend_node_attr("flow_name")
592
+ if flow_id or flow_name:
593
+ try: # user and flow ids are validated in the function
594
+ return await get_flow_by_id_or_name(
595
+ user_id=str(self.user_id), flow_id=str(flow_id) if flow_id else None, flow_name=flow_name
596
+ )
597
+ except Exception as e:
598
+ msg = f"Error listing flows: {e}"
599
+ raise ValueError(msg) from e
600
+ return None
601
+
560
602
  def build(self, *args: Any, **kwargs: Any) -> Any:
561
603
  """Builds the custom component.
562
604
 
@@ -586,3 +628,23 @@ class CustomComponent(BaseComponent):
586
628
  if self.tracing_service and hasattr(self.tracing_service, "get_langchain_callbacks"):
587
629
  return self.tracing_service.get_langchain_callbacks()
588
630
  return []
631
+
632
+ def _get_runtime_or_frontend_node_attr(self, attr_name: str) -> Any:
633
+ """Get attribute value from the attribute name.
634
+
635
+ Falls back to frontend node attribute version
636
+ if it was provided (expected when updating the component's
637
+ build config).
638
+
639
+ Args:
640
+ attr_name: The attribute name (e.g., "flow_id", "flow_name")
641
+
642
+ Returns:
643
+ The attribute value from runtime or frontend node attribute, or None if neither exists.
644
+ """
645
+ value = getattr(self, attr_name, None)
646
+ if value is None:
647
+ attr = f"_frontend_node_{attr_name}"
648
+ if hasattr(self, attr):
649
+ value = getattr(self, attr)
650
+ return value
@@ -72,8 +72,11 @@ class DirectoryReader:
72
72
  if component["error"] if with_errors else not component["error"]:
73
73
  component_tuple = (*build_component(component), component)
74
74
  components.append(component_tuple)
75
- except Exception: # noqa: BLE001
76
- logger.debug(f"Error while loading component {component['name']} from {component['file']}")
75
+ except Exception as exc: # noqa: BLE001
76
+ logger.debug(
77
+ f"Skipping component {component['name']} from {component['file']} (load error)",
78
+ exc_info=exc,
79
+ )
77
80
  continue
78
81
  items.append({"name": menu["name"], "path": menu["path"], "components": components})
79
82
  filtered = [menu for menu in items if menu["components"]]
lfx/graph/edge/base.py CHANGED
@@ -149,32 +149,55 @@ class Edge:
149
149
  # .outputs is a list of Output objects as dictionaries
150
150
  # meaning: check for "types" key in each dictionary
151
151
  self.source_types = [output for output in source.outputs if output["name"] == self.source_handle.name]
152
- self.target_reqs = target.required_inputs + target.optional_inputs
153
- # Both lists contain strings and sometimes a string contains the value we are
154
- # looking for e.g. comgin_out=["Chain"] and target_reqs=["LLMChain"]
155
- # so we need to check if any of the strings in source_types is in target_reqs
156
- self.valid = any(
157
- any(output_type in target_req for output_type in output["types"])
158
- for output in self.source_types
159
- for target_req in self.target_reqs
160
- )
161
- # Get what type of input the target node is expecting
162
152
 
163
- # Update the matched type to be the first found match
164
- self.matched_type = next(
165
- (
166
- output_type
153
+ # Check if this is an loop input (loop target handle with output_types)
154
+ is_loop_input = hasattr(self.target_handle, "input_types") and self.target_handle.input_types
155
+ loop_input_types = []
156
+
157
+ if is_loop_input:
158
+ # For loop inputs, use the configured input_types
159
+ # (which already includes original type + loop_types from frontend)
160
+ loop_input_types = list(self.target_handle.input_types)
161
+ self.valid = any(
162
+ any(output_type in loop_input_types for output_type in output["types"]) for output in self.source_types
163
+ )
164
+ # Find the first matching type
165
+ self.matched_type = next(
166
+ (
167
+ output_type
168
+ for output in self.source_types
169
+ for output_type in output["types"]
170
+ if output_type in loop_input_types
171
+ ),
172
+ None,
173
+ )
174
+ else:
175
+ # Standard validation for regular inputs
176
+ self.target_reqs = target.required_inputs + target.optional_inputs
177
+ # Both lists contain strings and sometimes a string contains the value we are
178
+ # looking for e.g. output_types=["Chain"] and target_reqs=["LLMChain"]
179
+ # so we need to check if any of the strings in source_types is in target_reqs
180
+ self.valid = any(
181
+ any(output_type in target_req for output_type in output["types"])
167
182
  for output in self.source_types
168
- for output_type in output["types"]
169
183
  for target_req in self.target_reqs
170
- if output_type in target_req
171
- ),
172
- None,
173
- )
184
+ )
185
+ # Update the matched type to be the first found match
186
+ self.matched_type = next(
187
+ (
188
+ output_type
189
+ for output in self.source_types
190
+ for output_type in output["types"]
191
+ for target_req in self.target_reqs
192
+ if output_type in target_req
193
+ ),
194
+ None,
195
+ )
196
+
174
197
  no_matched_type = self.matched_type is None
175
198
  if no_matched_type:
176
199
  logger.debug(self.source_types)
177
- logger.debug(self.target_reqs)
200
+ logger.debug(self.target_reqs if not is_loop_input else loop_input_types)
178
201
  msg = f"Edge between {source.vertex_type} and {target.vertex_type} has no matched type."
179
202
  raise ValueError(msg)
180
203
 
lfx/graph/state/model.py CHANGED
@@ -1,7 +1,7 @@
1
1
  from collections.abc import Callable
2
2
  from typing import Any, get_type_hints
3
3
 
4
- from pydantic import ConfigDict, computed_field, create_model
4
+ from pydantic import BaseModel, ConfigDict, computed_field, create_model
5
5
  from pydantic.fields import FieldInfo
6
6
 
7
7
 
@@ -199,6 +199,7 @@ def create_state_model(model_name: str = "State", *, validate: bool = True, **kw
199
199
  with a 'get_output_by_method' attribute when validate is True.
200
200
  """
201
201
  fields = {}
202
+ computed_fields_dict = {}
202
203
 
203
204
  for name, value in kwargs.items():
204
205
  # Extract the return type from the method's type annotations
@@ -214,7 +215,8 @@ def create_state_model(model_name: str = "State", *, validate: bool = True, **kw
214
215
  if ("get_output_by_method" not in str(e) and "__self__" not in str(e)) or validate:
215
216
  raise
216
217
  property_method = value
217
- fields[name] = computed_field(property_method)
218
+ # Store computed fields separately to add them to the base class
219
+ computed_fields_dict[name] = computed_field(property_method)
218
220
  elif isinstance(value, FieldInfo):
219
221
  field_tuple = (value.annotation or Any, value)
220
222
  fields[name] = field_tuple
@@ -234,4 +236,15 @@ def create_state_model(model_name: str = "State", *, validate: bool = True, **kw
234
236
 
235
237
  # Create the model dynamically
236
238
  config_dict = ConfigDict(arbitrary_types_allowed=True, validate_assignment=True)
239
+
240
+ # If we have computed fields, create a base class with them first
241
+ if computed_fields_dict:
242
+ # Create a base class with computed fields
243
+ base_class_attrs = computed_fields_dict.copy()
244
+ base_class_attrs["model_config"] = config_dict
245
+ base_state_model = type(f"{model_name}Base", (BaseModel,), base_class_attrs)
246
+
247
+ # Then create the final model with the base class
248
+ return create_model(model_name, __base__=base_state_model, __config__=config_dict, **fields)
249
+ # No computed fields, just create the model directly
237
250
  return create_model(model_name, __config__=config_dict, **fields)
lfx/graph/utils.py CHANGED
@@ -198,3 +198,9 @@ def has_chat_output(vertices: dict[Vertex, int]):
198
198
  from lfx.graph.schema import InterfaceComponentTypes
199
199
 
200
200
  return any(InterfaceComponentTypes.ChatOutput in vertex.id for vertex in vertices)
201
+
202
+
203
+ def has_chat_input(vertices: dict[Vertex, int]):
204
+ from lfx.graph.schema import InterfaceComponentTypes
205
+
206
+ return any(InterfaceComponentTypes.ChatInput in vertex.id for vertex in vertices)
@@ -3,7 +3,6 @@
3
3
  from __future__ import annotations
4
4
 
5
5
  import ast
6
- import os
7
6
  from typing import TYPE_CHECKING, Any
8
7
 
9
8
  import pandas as pd
@@ -129,6 +128,8 @@ class ParameterHandler:
129
128
 
130
129
  def should_skip_field(self, field_name: str, field: dict, params: dict[str, Any]) -> bool:
131
130
  """Determine if field should be skipped."""
131
+ if field.get("override_skip"):
132
+ return False
132
133
  return (
133
134
  field.get("type") == "other"
134
135
  or field_name in params
@@ -137,7 +138,10 @@ class ParameterHandler:
137
138
  )
138
139
 
139
140
  def process_file_field(self, field_name: str, field: dict, params: dict[str, Any]) -> dict[str, Any]:
140
- """Process file type fields."""
141
+ """Process file type fields.
142
+
143
+ Converts logical paths (flow_id/filename) to component-ready paths.
144
+ """
141
145
  if file_path := field.get("file_path"):
142
146
  try:
143
147
  full_path: str | list[str] = ""
@@ -146,12 +150,11 @@ class ParameterHandler:
146
150
  if isinstance(file_path, str):
147
151
  file_path = [file_path]
148
152
  for p in file_path:
149
- flow_id, file_name = os.path.split(p)
150
- path = self.storage_service.build_full_path(flow_id, file_name)
151
- full_path.append(path)
153
+ resolved = self.storage_service.resolve_component_path(p)
154
+ full_path.append(resolved)
152
155
  else:
153
- flow_id, file_name = os.path.split(file_path)
154
- full_path = self.storage_service.build_full_path(flow_id, file_name)
156
+ full_path = self.storage_service.resolve_component_path(file_path)
157
+
155
158
  except ValueError as e:
156
159
  if "too many values to unpack" in str(e):
157
160
  full_path = file_path
lfx/helpers/__init__.py CHANGED
@@ -36,8 +36,11 @@ if has_langflow_memory():
36
36
  from langflow.helpers.flow import (
37
37
  build_schema_from_inputs,
38
38
  get_arg_names,
39
+ get_flow_by_id_or_name,
39
40
  get_flow_inputs,
40
41
  list_flows,
42
+ list_flows_by_flow_folder,
43
+ list_flows_by_folder_id,
41
44
  load_flow,
42
45
  run_flow,
43
46
  )
@@ -69,8 +72,11 @@ if has_langflow_memory():
69
72
  from lfx.helpers.flow import (
70
73
  build_schema_from_inputs,
71
74
  get_arg_names,
75
+ get_flow_by_id_or_name,
72
76
  get_flow_inputs,
73
77
  list_flows,
78
+ list_flows_by_flow_folder,
79
+ list_flows_by_folder_id,
74
80
  load_flow,
75
81
  run_flow,
76
82
  )
@@ -102,8 +108,11 @@ else:
102
108
  from lfx.helpers.flow import (
103
109
  build_schema_from_inputs,
104
110
  get_arg_names,
111
+ get_flow_by_id_or_name,
105
112
  get_flow_inputs,
106
113
  list_flows,
114
+ list_flows_by_flow_folder,
115
+ list_flows_by_folder_id,
107
116
  load_flow,
108
117
  run_flow,
109
118
  )
@@ -121,8 +130,11 @@ __all__ = [
121
130
  "docs_to_data",
122
131
  "format_type",
123
132
  "get_arg_names",
133
+ "get_flow_by_id_or_name",
124
134
  "get_flow_inputs",
125
135
  "list_flows",
136
+ "list_flows_by_flow_folder",
137
+ "list_flows_by_folder_id",
126
138
  "load_flow",
127
139
  "run_flow",
128
140
  "safe_convert",
lfx/helpers/flow.py CHANGED
@@ -85,6 +85,123 @@ async def list_flows(*, user_id: str | None = None) -> list[Data]:
85
85
  return []
86
86
 
87
87
 
88
+ async def list_flows_by_flow_folder(
89
+ *,
90
+ user_id: str | None = None,
91
+ flow_id: str | None = None,
92
+ order_params: dict | None = {"column": "updated_at", "direction": "desc"}, # noqa: B006, ARG001
93
+ ) -> list[Data]:
94
+ """Lists flows for the given user and in the same folder as the specified flow.
95
+
96
+ Retrieves all flows belonging to the given user and identified by user_id
97
+ that belong to the same folder as the flow identified by flow_id if the flow belongs to the user.
98
+
99
+ Optionally accepts a dictionary of order parameters
100
+ to order the flows by the specified column and direction.
101
+ Default order column is "updated_at" and default order direction is "desc".
102
+
103
+ In lfx, this is a stub that returns an empty list since we don't have
104
+ a database backend by default.
105
+
106
+ Args:
107
+ user_id (str | None, optional): The user ID to list flows for. Defaults to None.
108
+ flow_id (str | None, optional): The flow ID to list flows in the same folder as. Defaults to None.
109
+ order_params (dict | None, optional): Parameters for ordering the flows.
110
+ Defaults to {"column": "updated_at", "direction": "desc"}.
111
+ - column: The column to order by. Defaults to "updated_at".
112
+ - direction: The direction to order by. Defaults to "desc".
113
+
114
+ Returns:
115
+ list[Data]: List of flows in the same folder as the flow identified by flow_id.
116
+
117
+ Raises:
118
+ ValueError: If user_id is not provided.
119
+ ValueError: If Flow ID is not provided.
120
+ """
121
+ if not user_id:
122
+ msg = "Session is invalid"
123
+ raise ValueError(msg)
124
+ if not flow_id:
125
+ msg = "Flow ID is required"
126
+ raise ValueError(msg)
127
+
128
+ # In lfx, we don't have a database backend by default
129
+ # This is a stub implementation
130
+ logger.warning("list_flows_by_flow_folder called but lfx doesn't have database backend by default")
131
+ return []
132
+
133
+
134
+ async def list_flows_by_folder_id(
135
+ *,
136
+ user_id: str | None = None,
137
+ folder_id: str | None = None,
138
+ ) -> list[Data]:
139
+ """Lists flows for the given user and in the same folder as the specified folder.
140
+
141
+ Retrieves all flows belonging to the user identified by user_id
142
+ that belong to the same folder as the folder identified by folder_id
143
+ if the folder belongs to the user.
144
+
145
+ In lfx, this is a stub that returns an empty list since we don't have
146
+ a database backend by default.
147
+
148
+ Args:
149
+ user_id (str | None, optional): The user ID to list flows for. Defaults to None.
150
+ folder_id (str | None, optional): The folder ID to list flows in the same folder as. Defaults to None.
151
+
152
+ Returns:
153
+ list[Data]: List of flows in the same folder as the folder identified by folder_id.
154
+
155
+ Raises:
156
+ ValueError: If user_id is not provided.
157
+ ValueError: If Folder ID is not provided.
158
+ """
159
+ if not user_id:
160
+ msg = "Session is invalid"
161
+ raise ValueError(msg)
162
+ if not folder_id:
163
+ msg = "Folder ID is required"
164
+ raise ValueError(msg)
165
+
166
+ # In lfx, we don't have a database backend by default
167
+ # This is a stub implementation
168
+ logger.warning("list_flows_by_folder_id called but lfx doesn't have database backend by default")
169
+ return []
170
+
171
+
172
+ async def get_flow_by_id_or_name(
173
+ user_id: str,
174
+ flow_id: str | None = None,
175
+ flow_name: str | None = None,
176
+ ) -> Data | None:
177
+ """Get a flow by ID or name.
178
+
179
+ Retrieves a flow by ID or name. If both are provided, flow_id is used.
180
+
181
+ In lfx, this is a stub that returns None since we don't have
182
+ a database backend by default.
183
+
184
+ Args:
185
+ user_id (str): The user ID to get the flow for.
186
+ flow_id (str | None, optional): The flow ID. Defaults to None.
187
+ flow_name (str | None, optional): The flow name. Defaults to None.
188
+
189
+ Returns:
190
+ Data | None: The flow data or None if not found.
191
+ """
192
+ if not user_id:
193
+ msg = "Session is invalid"
194
+ raise ValueError(msg)
195
+ if not (flow_id or flow_name):
196
+ msg = "Flow ID or Flow Name is required"
197
+ raise ValueError(msg)
198
+
199
+ # In lfx, we don't have a database backend by default
200
+ # This is a stub implementation
201
+ logger.warning("get_flow_by_id_or_name called but lfx doesn't have database backend by default")
202
+ return None
203
+
204
+
88
205
  async def load_flow(
89
206
  user_id: str, # noqa: ARG001
90
207
  flow_id: str | None = None,
lfx/inputs/input_mixin.py CHANGED
@@ -18,7 +18,7 @@ from lfx.schema.cross_module import CrossModuleModel
18
18
  class FieldTypes(str, Enum):
19
19
  TEXT = "str"
20
20
  INTEGER = "int"
21
- PASSWORD = "str" # noqa: PIE796
21
+ PASSWORD = "str" # noqa: PIE796 pragma: allowlist secret
22
22
  FLOAT = "float"
23
23
  BOOLEAN = "bool"
24
24
  DICT = "dict"
@@ -41,6 +41,15 @@ class FieldTypes(str, Enum):
41
41
 
42
42
  SerializableFieldTypes = Annotated[FieldTypes, PlainSerializer(lambda v: v.value, return_type=str)]
43
43
 
44
+ # Field types that should never be tracked in telemetry due to sensitive data
45
+ SENSITIVE_FIELD_TYPES = {
46
+ FieldTypes.PASSWORD,
47
+ FieldTypes.AUTH,
48
+ FieldTypes.FILE,
49
+ FieldTypes.CONNECTION,
50
+ FieldTypes.MCP,
51
+ }
52
+
44
53
 
45
54
  # Base mixin for common input field attributes and methods
46
55
  class BaseInputMixin(CrossModuleModel, validate_assignment=True): # type: ignore[call-arg]
@@ -52,6 +61,9 @@ class BaseInputMixin(CrossModuleModel, validate_assignment=True): # type: ignor
52
61
 
53
62
  field_type: SerializableFieldTypes = Field(default=FieldTypes.TEXT, alias="type")
54
63
 
64
+ override_skip: bool = False
65
+ """Specifies if the field should never be skipped. Defaults to False."""
66
+
55
67
  required: bool = False
56
68
  """Specifies if the field is required. Defaults to False."""
57
69
 
@@ -97,6 +109,13 @@ class BaseInputMixin(CrossModuleModel, validate_assignment=True): # type: ignor
97
109
  title_case: bool = False
98
110
  """Specifies if the field should be displayed in title case. Defaults to True."""
99
111
 
112
+ track_in_telemetry: CoalesceBool = False
113
+ """Specifies if the field value should be tracked in telemetry.
114
+
115
+ Defaults to False (opt-in). Automatically disabled for sensitive field types.
116
+ Individual input types can explicitly enable tracking for safe, useful data.
117
+ """
118
+
100
119
  def to_dict(self):
101
120
  return self.model_dump(exclude_none=True, by_alias=True)
102
121
 
@@ -291,6 +310,10 @@ class MultilineMixin(BaseModel):
291
310
  multiline: CoalesceBool = True
292
311
 
293
312
 
313
+ class AIMixin(BaseModel):
314
+ ai_enabled: CoalesceBool = False
315
+
316
+
294
317
  class LinkMixin(BaseModel):
295
318
  icon: str | None = None
296
319
  """Icon to be displayed in the link."""