lfx-nightly 0.1.13.dev0__py3-none-any.whl → 0.2.0.dev26__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (237)
  1. lfx/_assets/component_index.json +1 -1
  2. lfx/base/agents/agent.py +121 -29
  3. lfx/base/agents/altk_base_agent.py +380 -0
  4. lfx/base/agents/altk_tool_wrappers.py +565 -0
  5. lfx/base/agents/events.py +103 -35
  6. lfx/base/agents/utils.py +15 -2
  7. lfx/base/composio/composio_base.py +183 -233
  8. lfx/base/data/base_file.py +88 -21
  9. lfx/base/data/storage_utils.py +192 -0
  10. lfx/base/data/utils.py +178 -14
  11. lfx/base/datastax/__init__.py +5 -0
  12. lfx/{components/vectorstores/astradb.py → base/datastax/astradb_base.py} +84 -473
  13. lfx/base/embeddings/embeddings_class.py +113 -0
  14. lfx/base/io/chat.py +5 -4
  15. lfx/base/mcp/util.py +101 -15
  16. lfx/base/models/groq_constants.py +74 -58
  17. lfx/base/models/groq_model_discovery.py +265 -0
  18. lfx/base/models/model.py +1 -1
  19. lfx/base/models/model_input_constants.py +74 -7
  20. lfx/base/models/model_utils.py +100 -0
  21. lfx/base/models/ollama_constants.py +3 -0
  22. lfx/base/models/openai_constants.py +7 -0
  23. lfx/base/models/watsonx_constants.py +36 -0
  24. lfx/base/tools/run_flow.py +601 -129
  25. lfx/cli/commands.py +7 -4
  26. lfx/cli/common.py +2 -2
  27. lfx/cli/run.py +1 -1
  28. lfx/cli/script_loader.py +53 -11
  29. lfx/components/Notion/create_page.py +1 -1
  30. lfx/components/Notion/list_database_properties.py +1 -1
  31. lfx/components/Notion/list_pages.py +1 -1
  32. lfx/components/Notion/list_users.py +1 -1
  33. lfx/components/Notion/page_content_viewer.py +1 -1
  34. lfx/components/Notion/search.py +1 -1
  35. lfx/components/Notion/update_page_property.py +1 -1
  36. lfx/components/__init__.py +19 -5
  37. lfx/components/altk/__init__.py +34 -0
  38. lfx/components/altk/altk_agent.py +193 -0
  39. lfx/components/amazon/amazon_bedrock_converse.py +1 -1
  40. lfx/components/apify/apify_actor.py +4 -4
  41. lfx/components/composio/__init__.py +70 -18
  42. lfx/components/composio/apollo_composio.py +11 -0
  43. lfx/components/composio/bitbucket_composio.py +11 -0
  44. lfx/components/composio/canva_composio.py +11 -0
  45. lfx/components/composio/coda_composio.py +11 -0
  46. lfx/components/composio/composio_api.py +10 -0
  47. lfx/components/composio/discord_composio.py +1 -1
  48. lfx/components/composio/elevenlabs_composio.py +11 -0
  49. lfx/components/composio/exa_composio.py +11 -0
  50. lfx/components/composio/firecrawl_composio.py +11 -0
  51. lfx/components/composio/fireflies_composio.py +11 -0
  52. lfx/components/composio/gmail_composio.py +1 -1
  53. lfx/components/composio/googlebigquery_composio.py +11 -0
  54. lfx/components/composio/googlecalendar_composio.py +1 -1
  55. lfx/components/composio/googledocs_composio.py +1 -1
  56. lfx/components/composio/googlemeet_composio.py +1 -1
  57. lfx/components/composio/googlesheets_composio.py +1 -1
  58. lfx/components/composio/googletasks_composio.py +1 -1
  59. lfx/components/composio/heygen_composio.py +11 -0
  60. lfx/components/composio/mem0_composio.py +11 -0
  61. lfx/components/composio/peopledatalabs_composio.py +11 -0
  62. lfx/components/composio/perplexityai_composio.py +11 -0
  63. lfx/components/composio/serpapi_composio.py +11 -0
  64. lfx/components/composio/slack_composio.py +3 -574
  65. lfx/components/composio/slackbot_composio.py +1 -1
  66. lfx/components/composio/snowflake_composio.py +11 -0
  67. lfx/components/composio/tavily_composio.py +11 -0
  68. lfx/components/composio/youtube_composio.py +2 -2
  69. lfx/components/{agents → cuga}/__init__.py +5 -7
  70. lfx/components/cuga/cuga_agent.py +730 -0
  71. lfx/components/data/__init__.py +78 -28
  72. lfx/components/data_source/__init__.py +58 -0
  73. lfx/components/{data → data_source}/api_request.py +26 -3
  74. lfx/components/{data → data_source}/csv_to_data.py +15 -10
  75. lfx/components/{data → data_source}/json_to_data.py +15 -8
  76. lfx/components/{data → data_source}/news_search.py +1 -1
  77. lfx/components/{data → data_source}/rss.py +1 -1
  78. lfx/components/{data → data_source}/sql_executor.py +1 -1
  79. lfx/components/{data → data_source}/url.py +1 -1
  80. lfx/components/{data → data_source}/web_search.py +1 -1
  81. lfx/components/datastax/__init__.py +12 -6
  82. lfx/components/datastax/{astra_assistant_manager.py → astradb_assistant_manager.py} +1 -0
  83. lfx/components/datastax/astradb_chatmemory.py +40 -0
  84. lfx/components/datastax/astradb_cql.py +6 -32
  85. lfx/components/datastax/astradb_graph.py +10 -124
  86. lfx/components/datastax/astradb_tool.py +13 -53
  87. lfx/components/datastax/astradb_vectorstore.py +134 -977
  88. lfx/components/datastax/create_assistant.py +1 -0
  89. lfx/components/datastax/create_thread.py +1 -0
  90. lfx/components/datastax/dotenv.py +1 -0
  91. lfx/components/datastax/get_assistant.py +1 -0
  92. lfx/components/datastax/getenvvar.py +1 -0
  93. lfx/components/datastax/graph_rag.py +1 -1
  94. lfx/components/datastax/hcd.py +1 -1
  95. lfx/components/datastax/list_assistants.py +1 -0
  96. lfx/components/datastax/run.py +1 -0
  97. lfx/components/deactivated/json_document_builder.py +1 -1
  98. lfx/components/elastic/elasticsearch.py +1 -1
  99. lfx/components/elastic/opensearch_multimodal.py +1575 -0
  100. lfx/components/files_and_knowledge/__init__.py +47 -0
  101. lfx/components/{data → files_and_knowledge}/directory.py +1 -1
  102. lfx/components/{data → files_and_knowledge}/file.py +246 -18
  103. lfx/components/{knowledge_bases → files_and_knowledge}/ingestion.py +17 -9
  104. lfx/components/{knowledge_bases → files_and_knowledge}/retrieval.py +18 -10
  105. lfx/components/{data → files_and_knowledge}/save_file.py +142 -22
  106. lfx/components/flow_controls/__init__.py +58 -0
  107. lfx/components/{logic → flow_controls}/conditional_router.py +1 -1
  108. lfx/components/{logic → flow_controls}/loop.py +47 -9
  109. lfx/components/flow_controls/run_flow.py +108 -0
  110. lfx/components/glean/glean_search_api.py +1 -1
  111. lfx/components/groq/groq.py +35 -28
  112. lfx/components/helpers/__init__.py +102 -0
  113. lfx/components/ibm/watsonx.py +25 -21
  114. lfx/components/input_output/__init__.py +3 -1
  115. lfx/components/input_output/chat.py +12 -3
  116. lfx/components/input_output/chat_output.py +12 -4
  117. lfx/components/input_output/text.py +1 -1
  118. lfx/components/input_output/text_output.py +1 -1
  119. lfx/components/{data → input_output}/webhook.py +1 -1
  120. lfx/components/knowledge_bases/__init__.py +59 -4
  121. lfx/components/langchain_utilities/character.py +1 -1
  122. lfx/components/langchain_utilities/csv_agent.py +84 -16
  123. lfx/components/langchain_utilities/json_agent.py +67 -12
  124. lfx/components/langchain_utilities/language_recursive.py +1 -1
  125. lfx/components/llm_operations/__init__.py +46 -0
  126. lfx/components/{processing → llm_operations}/batch_run.py +1 -1
  127. lfx/components/{processing → llm_operations}/lambda_filter.py +1 -1
  128. lfx/components/{logic → llm_operations}/llm_conditional_router.py +1 -1
  129. lfx/components/{processing/llm_router.py → llm_operations/llm_selector.py} +3 -3
  130. lfx/components/{processing → llm_operations}/structured_output.py +56 -18
  131. lfx/components/logic/__init__.py +126 -0
  132. lfx/components/mem0/mem0_chat_memory.py +11 -0
  133. lfx/components/mistral/mistral_embeddings.py +1 -1
  134. lfx/components/models/__init__.py +64 -9
  135. lfx/components/models_and_agents/__init__.py +49 -0
  136. lfx/components/{agents → models_and_agents}/agent.py +49 -6
  137. lfx/components/models_and_agents/embedding_model.py +423 -0
  138. lfx/components/models_and_agents/language_model.py +398 -0
  139. lfx/components/{agents → models_and_agents}/mcp_component.py +84 -45
  140. lfx/components/{helpers → models_and_agents}/memory.py +1 -1
  141. lfx/components/nvidia/system_assist.py +1 -1
  142. lfx/components/olivya/olivya.py +1 -1
  143. lfx/components/ollama/ollama.py +235 -14
  144. lfx/components/openrouter/openrouter.py +49 -147
  145. lfx/components/processing/__init__.py +9 -57
  146. lfx/components/processing/converter.py +1 -1
  147. lfx/components/processing/dataframe_operations.py +1 -1
  148. lfx/components/processing/parse_json_data.py +2 -2
  149. lfx/components/processing/parser.py +7 -2
  150. lfx/components/processing/split_text.py +1 -1
  151. lfx/components/qdrant/qdrant.py +1 -1
  152. lfx/components/redis/redis.py +1 -1
  153. lfx/components/twelvelabs/split_video.py +10 -0
  154. lfx/components/twelvelabs/video_file.py +12 -0
  155. lfx/components/utilities/__init__.py +43 -0
  156. lfx/components/{helpers → utilities}/calculator_core.py +1 -1
  157. lfx/components/{helpers → utilities}/current_date.py +1 -1
  158. lfx/components/{processing → utilities}/python_repl_core.py +1 -1
  159. lfx/components/vectorstores/__init__.py +0 -6
  160. lfx/components/vectorstores/local_db.py +9 -0
  161. lfx/components/youtube/youtube_transcripts.py +118 -30
  162. lfx/custom/custom_component/component.py +60 -3
  163. lfx/custom/custom_component/custom_component.py +68 -6
  164. lfx/field_typing/constants.py +1 -0
  165. lfx/graph/edge/base.py +45 -22
  166. lfx/graph/graph/base.py +5 -2
  167. lfx/graph/graph/schema.py +3 -2
  168. lfx/graph/state/model.py +15 -2
  169. lfx/graph/utils.py +6 -0
  170. lfx/graph/vertex/base.py +4 -1
  171. lfx/graph/vertex/param_handler.py +10 -7
  172. lfx/graph/vertex/vertex_types.py +1 -1
  173. lfx/helpers/__init__.py +12 -0
  174. lfx/helpers/flow.py +117 -0
  175. lfx/inputs/input_mixin.py +24 -1
  176. lfx/inputs/inputs.py +13 -1
  177. lfx/interface/components.py +161 -83
  178. lfx/io/schema.py +6 -0
  179. lfx/log/logger.py +5 -3
  180. lfx/schema/schema.py +5 -0
  181. lfx/services/database/__init__.py +5 -0
  182. lfx/services/database/service.py +25 -0
  183. lfx/services/deps.py +87 -22
  184. lfx/services/manager.py +19 -6
  185. lfx/services/mcp_composer/service.py +998 -157
  186. lfx/services/session.py +5 -0
  187. lfx/services/settings/base.py +51 -7
  188. lfx/services/settings/constants.py +8 -0
  189. lfx/services/storage/local.py +76 -46
  190. lfx/services/storage/service.py +152 -29
  191. lfx/template/field/base.py +3 -0
  192. lfx/utils/ssrf_protection.py +384 -0
  193. lfx/utils/validate_cloud.py +26 -0
  194. {lfx_nightly-0.1.13.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/METADATA +38 -22
  195. {lfx_nightly-0.1.13.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/RECORD +210 -196
  196. {lfx_nightly-0.1.13.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/WHEEL +1 -1
  197. lfx/components/agents/cuga_agent.py +0 -1013
  198. lfx/components/datastax/astra_db.py +0 -77
  199. lfx/components/datastax/cassandra.py +0 -92
  200. lfx/components/logic/run_flow.py +0 -71
  201. lfx/components/models/embedding_model.py +0 -114
  202. lfx/components/models/language_model.py +0 -144
  203. lfx/components/vectorstores/astradb_graph.py +0 -326
  204. lfx/components/vectorstores/cassandra.py +0 -264
  205. lfx/components/vectorstores/cassandra_graph.py +0 -238
  206. lfx/components/vectorstores/chroma.py +0 -167
  207. lfx/components/vectorstores/clickhouse.py +0 -135
  208. lfx/components/vectorstores/couchbase.py +0 -102
  209. lfx/components/vectorstores/elasticsearch.py +0 -267
  210. lfx/components/vectorstores/faiss.py +0 -111
  211. lfx/components/vectorstores/graph_rag.py +0 -141
  212. lfx/components/vectorstores/hcd.py +0 -314
  213. lfx/components/vectorstores/milvus.py +0 -115
  214. lfx/components/vectorstores/mongodb_atlas.py +0 -213
  215. lfx/components/vectorstores/opensearch.py +0 -243
  216. lfx/components/vectorstores/pgvector.py +0 -72
  217. lfx/components/vectorstores/pinecone.py +0 -134
  218. lfx/components/vectorstores/qdrant.py +0 -109
  219. lfx/components/vectorstores/supabase.py +0 -76
  220. lfx/components/vectorstores/upstash.py +0 -124
  221. lfx/components/vectorstores/vectara.py +0 -97
  222. lfx/components/vectorstores/vectara_rag.py +0 -164
  223. lfx/components/vectorstores/weaviate.py +0 -89
  224. /lfx/components/{data → data_source}/mock_data.py +0 -0
  225. /lfx/components/datastax/{astra_vectorize.py → astradb_vectorize.py} +0 -0
  226. /lfx/components/{logic → flow_controls}/data_conditional_router.py +0 -0
  227. /lfx/components/{logic → flow_controls}/flow_tool.py +0 -0
  228. /lfx/components/{logic → flow_controls}/listen.py +0 -0
  229. /lfx/components/{logic → flow_controls}/notify.py +0 -0
  230. /lfx/components/{logic → flow_controls}/pass_message.py +0 -0
  231. /lfx/components/{logic → flow_controls}/sub_flow.py +0 -0
  232. /lfx/components/{processing → models_and_agents}/prompt.py +0 -0
  233. /lfx/components/{helpers → processing}/create_list.py +0 -0
  234. /lfx/components/{helpers → processing}/output_parser.py +0 -0
  235. /lfx/components/{helpers → processing}/store_message.py +0 -0
  236. /lfx/components/{helpers → utilities}/id_generator.py +0 -0
  237. {lfx_nightly-0.1.13.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/entry_points.txt +0 -0
@@ -1,6 +1,9 @@
1
- from langchain_experimental.agents.agent_toolkits.csv.base import create_csv_agent
1
+ import contextlib
2
+ import tempfile
3
+ from pathlib import Path
2
4
 
3
5
  from lfx.base.agents.agent import LCAgentComponent
6
+ from lfx.base.data.storage_utils import read_file_bytes
4
7
  from lfx.field_typing import AgentExecutor
5
8
  from lfx.inputs.inputs import (
6
9
  DictInput,
@@ -10,7 +13,9 @@ from lfx.inputs.inputs import (
10
13
  MessageTextInput,
11
14
  )
12
15
  from lfx.schema.message import Message
16
+ from lfx.services.deps import get_settings_service
13
17
  from lfx.template.field.base import Output
18
+ from lfx.utils.async_helpers import run_until_complete
14
19
 
15
20
 
16
21
  class CSVAgentComponent(LCAgentComponent):
@@ -70,32 +75,60 @@ class CSVAgentComponent(LCAgentComponent):
70
75
  return self.path
71
76
 
72
77
  def build_agent_response(self) -> Message:
73
- agent_kwargs = {
74
- "verbose": self.verbose,
75
- "allow_dangerous_code": True,
76
- }
78
+ """Build and execute the CSV agent, returning the response."""
79
+ try:
80
+ from langchain_experimental.agents.agent_toolkits.csv.base import create_csv_agent
81
+ except ImportError as e:
82
+ msg = (
83
+ "langchain-experimental is not installed. Please install it with `pip install langchain-experimental`."
84
+ )
85
+ raise ImportError(msg) from e
77
86
 
78
- agent_csv = create_csv_agent(
79
- llm=self.llm,
80
- path=self._path(),
81
- agent_type=self.agent_type,
82
- handle_parsing_errors=self.handle_parsing_errors,
83
- pandas_kwargs=self.pandas_kwargs,
84
- **agent_kwargs,
85
- )
87
+ try:
88
+ agent_kwargs = {
89
+ "verbose": self.verbose,
90
+ "allow_dangerous_code": True,
91
+ }
92
+
93
+ # Get local path (downloads from S3 if needed)
94
+ local_path = self._get_local_path()
86
95
 
87
- result = agent_csv.invoke({"input": self.input_value})
88
- return Message(text=str(result["output"]))
96
+ agent_csv = create_csv_agent(
97
+ llm=self.llm,
98
+ path=local_path,
99
+ agent_type=self.agent_type,
100
+ handle_parsing_errors=self.handle_parsing_errors,
101
+ pandas_kwargs=self.pandas_kwargs,
102
+ **agent_kwargs,
103
+ )
104
+
105
+ result = agent_csv.invoke({"input": self.input_value})
106
+ return Message(text=str(result["output"]))
107
+
108
+ finally:
109
+ # Clean up temp file if created
110
+ self._cleanup_temp_file()
89
111
 
90
112
  def build_agent(self) -> AgentExecutor:
113
+ try:
114
+ from langchain_experimental.agents.agent_toolkits.csv.base import create_csv_agent
115
+ except ImportError as e:
116
+ msg = (
117
+ "langchain-experimental is not installed. Please install it with `pip install langchain-experimental`."
118
+ )
119
+ raise ImportError(msg) from e
120
+
91
121
  agent_kwargs = {
92
122
  "verbose": self.verbose,
93
123
  "allow_dangerous_code": True,
94
124
  }
95
125
 
126
+ # Get local path (downloads from S3 if needed)
127
+ local_path = self._get_local_path()
128
+
96
129
  agent_csv = create_csv_agent(
97
130
  llm=self.llm,
98
- path=self._path(),
131
+ path=local_path,
99
132
  agent_type=self.agent_type,
100
133
  handle_parsing_errors=self.handle_parsing_errors,
101
134
  pandas_kwargs=self.pandas_kwargs,
@@ -104,4 +137,39 @@ class CSVAgentComponent(LCAgentComponent):
104
137
 
105
138
  self.status = Message(text=str(agent_csv))
106
139
 
140
+ # Note: Temp file will be cleaned up when the component is destroyed or
141
+ # when build_agent_response is called
107
142
  return agent_csv
143
+
144
+ def _get_local_path(self) -> str:
145
+ """Get a local file path, downloading from S3 storage if necessary.
146
+
147
+ Returns:
148
+ str: Local file path that can be used by LangChain
149
+ """
150
+ file_path = self._path()
151
+ settings = get_settings_service().settings
152
+
153
+ # If using S3 storage, download the file to temp
154
+ if settings.storage_type == "s3":
155
+ # Download from S3 to temp file
156
+ csv_bytes = run_until_complete(read_file_bytes(file_path))
157
+
158
+ # Create temp file with .csv extension
159
+ suffix = Path(file_path.split("/")[-1]).suffix or ".csv"
160
+ with tempfile.NamedTemporaryFile(mode="wb", suffix=suffix, delete=False) as tmp_file:
161
+ tmp_file.write(csv_bytes)
162
+ temp_path = tmp_file.name
163
+
164
+ # Store temp path for cleanup
165
+ self._temp_file_path = temp_path
166
+ return temp_path
167
+
168
+ # Local storage - return path as-is
169
+ return file_path
170
+
171
+ def _cleanup_temp_file(self) -> None:
172
+ """Clean up temporary file if one was created."""
173
+ if hasattr(self, "_temp_file_path"):
174
+ with contextlib.suppress(Exception):
175
+ Path(self._temp_file_path).unlink() # Ignore cleanup errors
@@ -1,13 +1,15 @@
1
+ import contextlib
2
+ import tempfile
1
3
  from pathlib import Path
2
4
 
3
5
  import yaml
4
6
  from langchain.agents import AgentExecutor
5
- from langchain_community.agent_toolkits import create_json_agent
6
- from langchain_community.agent_toolkits.json.toolkit import JsonToolkit
7
- from langchain_community.tools.json.tool import JsonSpec
8
7
 
9
8
  from lfx.base.agents.agent import LCAgentComponent
9
+ from lfx.base.data.storage_utils import read_file_bytes
10
10
  from lfx.inputs.inputs import FileInput, HandleInput
11
+ from lfx.services.deps import get_settings_service
12
+ from lfx.utils.async_helpers import run_until_complete
11
13
 
12
14
 
13
15
  class JsonAgentComponent(LCAgentComponent):
@@ -32,14 +34,67 @@ class JsonAgentComponent(LCAgentComponent):
32
34
  ),
33
35
  ]
34
36
 
37
+ def _get_local_path(self) -> Path:
38
+ """Get a local file path, downloading from S3 storage if necessary.
39
+
40
+ Returns:
41
+ Path: Local file path that can be used by LangChain
42
+ """
43
+ file_path = self.path
44
+ settings = get_settings_service().settings
45
+
46
+ # If using S3 storage, download the file to temp
47
+ if settings.storage_type == "s3":
48
+ # Download from S3 to temp file
49
+ file_bytes = run_until_complete(read_file_bytes(file_path))
50
+
51
+ # Create temp file with appropriate extension
52
+ suffix = Path(file_path.split("/")[-1]).suffix or ".json"
53
+ with tempfile.NamedTemporaryFile(mode="wb", suffix=suffix, delete=False) as tmp_file:
54
+ tmp_file.write(file_bytes)
55
+ temp_path = tmp_file.name
56
+
57
+ # Store temp path for cleanup
58
+ self._temp_file_path = temp_path
59
+ return Path(temp_path)
60
+
61
+ # Local storage - return as Path
62
+ return Path(file_path)
63
+
64
+ def _cleanup_temp_file(self) -> None:
65
+ """Clean up temporary file if one was created."""
66
+ if hasattr(self, "_temp_file_path"):
67
+ with contextlib.suppress(Exception):
68
+ Path(self._temp_file_path).unlink() # Ignore cleanup errors
69
+
35
70
  def build_agent(self) -> AgentExecutor:
36
- path = Path(self.path)
37
- if path.suffix in {"yaml", "yml"}:
38
- with path.open(encoding="utf-8") as file:
39
- yaml_dict = yaml.safe_load(file)
40
- spec = JsonSpec(dict_=yaml_dict)
41
- else:
42
- spec = JsonSpec.from_file(path)
43
- toolkit = JsonToolkit(spec=spec)
71
+ """Build the JSON agent executor."""
72
+ try:
73
+ from langchain_community.agent_toolkits import create_json_agent
74
+ from langchain_community.agent_toolkits.json.toolkit import JsonToolkit
75
+ from langchain_community.tools.json.tool import JsonSpec
76
+ except ImportError as e:
77
+ msg = "langchain-community is not installed. Please install it with `pip install langchain-community`."
78
+ raise ImportError(msg) from e
79
+
80
+ try:
81
+ # Get local path (downloads from S3 if needed)
82
+ path = self._get_local_path()
44
83
 
45
- return create_json_agent(llm=self.llm, toolkit=toolkit, **self.get_agent_kwargs())
84
+ if path.suffix in {".yaml", ".yml"}:
85
+ with path.open(encoding="utf-8") as file:
86
+ yaml_dict = yaml.safe_load(file)
87
+ spec = JsonSpec(dict_=yaml_dict)
88
+ else:
89
+ spec = JsonSpec.from_file(str(path))
90
+ toolkit = JsonToolkit(spec=spec)
91
+
92
+ agent = create_json_agent(llm=self.llm, toolkit=toolkit, **self.get_agent_kwargs())
93
+ except Exception:
94
+ # Make sure to clean up temp file on error
95
+ self._cleanup_temp_file()
96
+ raise
97
+ else:
98
+ # Clean up temp file after agent is created
99
+ self._cleanup_temp_file()
100
+ return agent
@@ -9,7 +9,7 @@ from lfx.inputs.inputs import DataInput, DropdownInput, IntInput
9
9
  class LanguageRecursiveTextSplitterComponent(LCTextSplitterComponent):
10
10
  display_name: str = "Language Recursive Text Splitter"
11
11
  description: str = "Split text into chunks of a specified length based on language."
12
- documentation: str = "https://docs.langflow.org/components/text-splitters#languagerecursivetextsplitter"
12
+ documentation: str = "https://docs.langflow.org/bundles-langchain"
13
13
  name = "LanguageRecursiveTextSplitter"
14
14
  icon = "LangChain"
15
15
 
@@ -0,0 +1,46 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import TYPE_CHECKING, Any
4
+
5
+ from lfx.components._importing import import_mod
6
+
7
+ if TYPE_CHECKING:
8
+ from lfx.components.llm_operations.batch_run import BatchRunComponent
9
+ from lfx.components.llm_operations.lambda_filter import SmartTransformComponent
10
+ from lfx.components.llm_operations.llm_conditional_router import SmartRouterComponent
11
+ from lfx.components.llm_operations.llm_selector import LLMSelectorComponent
12
+ from lfx.components.llm_operations.structured_output import StructuredOutputComponent
13
+
14
+ _dynamic_imports = {
15
+ "BatchRunComponent": "batch_run",
16
+ "SmartTransformComponent": "lambda_filter",
17
+ "SmartRouterComponent": "llm_conditional_router",
18
+ "LLMSelectorComponent": "llm_selector",
19
+ "StructuredOutputComponent": "structured_output",
20
+ }
21
+
22
+ __all__ = [
23
+ "BatchRunComponent",
24
+ "LLMSelectorComponent",
25
+ "SmartRouterComponent",
26
+ "SmartTransformComponent",
27
+ "StructuredOutputComponent",
28
+ ]
29
+
30
+
31
+ def __getattr__(attr_name: str) -> Any:
32
+ """Lazily import LLM operation components on attribute access."""
33
+ if attr_name not in _dynamic_imports:
34
+ msg = f"module '{__name__}' has no attribute '{attr_name}'"
35
+ raise AttributeError(msg)
36
+ try:
37
+ result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent)
38
+ except (ModuleNotFoundError, ImportError, AttributeError) as e:
39
+ msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
40
+ raise AttributeError(msg) from e
41
+ globals()[attr_name] = result
42
+ return result
43
+
44
+
45
+ def __dir__() -> list[str]:
46
+ return list(__all__)
@@ -16,7 +16,7 @@ if TYPE_CHECKING:
16
16
  class BatchRunComponent(Component):
17
17
  display_name = "Batch Run"
18
18
  description = "Runs an LLM on each row of a DataFrame column. If no column is specified, all columns are used."
19
- documentation: str = "https://docs.langflow.org/components-processing#batch-run"
19
+ documentation: str = "https://docs.langflow.org/batch-run"
20
20
  icon = "List"
21
21
 
22
22
  inputs = [
@@ -16,7 +16,7 @@ if TYPE_CHECKING:
16
16
  class LambdaFilterComponent(Component):
17
17
  display_name = "Smart Transform"
18
18
  description = "Uses an LLM to generate a function for filtering or transforming structured data."
19
- documentation: str = "https://docs.langflow.org/components-processing#smart-transform"
19
+ documentation: str = "https://docs.langflow.org/smart-transform"
20
20
  icon = "square-function"
21
21
  name = "Smart Transform"
22
22
 
@@ -9,7 +9,7 @@ from lfx.schema.table import EditMode
9
9
  class SmartRouterComponent(Component):
10
10
  display_name = "Smart Router"
11
11
  description = "Routes an input message using LLM-based categorization."
12
- icon = "equal"
12
+ icon = "route"
13
13
  name = "SmartRouter"
14
14
 
15
15
  def __init__(self, **kwargs):
@@ -14,10 +14,10 @@ from lfx.schema.message import Message
14
14
  from lfx.template.field.base import Output
15
15
 
16
16
 
17
- class LLMRouterComponent(Component):
18
- display_name = "LLM Router"
17
+ class LLMSelectorComponent(Component):
18
+ display_name = "LLM Selector"
19
19
  description = "Routes the input to the most appropriate LLM based on OpenRouter model specifications"
20
- documentation: str = "https://docs.langflow.org/components-processing#llm-router"
20
+ documentation: str = "https://docs.langflow.org/llm-selector"
21
21
  icon = "git-branch"
22
22
 
23
23
  # Constants for magic values
@@ -11,6 +11,7 @@ from lfx.io import (
11
11
  Output,
12
12
  TableInput,
13
13
  )
14
+ from lfx.log.logger import logger
14
15
  from lfx.schema.data import Data
15
16
  from lfx.schema.dataframe import DataFrame
16
17
  from lfx.schema.table import EditMode
@@ -19,7 +20,7 @@ from lfx.schema.table import EditMode
19
20
  class StructuredOutputComponent(Component):
20
21
  display_name = "Structured Output"
21
22
  description = "Uses an LLM to generate structured data. Ideal for extraction and consistency."
22
- documentation: str = "https://docs.langflow.org/components-processing#structured-output"
23
+ documentation: str = "https://docs.langflow.org/structured-output"
23
24
  name = "StructuredOutput"
24
25
  icon = "braces"
25
26
 
@@ -136,30 +137,27 @@ class StructuredOutputComponent(Component):
136
137
  raise ValueError(msg)
137
138
 
138
139
  output_model_ = build_model_from_schema(self.output_schema)
139
-
140
140
  output_model = create_model(
141
141
  schema_name,
142
142
  __doc__=f"A list of {schema_name}.",
143
- objects=(list[output_model_], Field(description=f"A list of {schema_name}.")), # type: ignore[valid-type]
143
+ objects=(
144
+ list[output_model_],
145
+ Field(
146
+ description=f"A list of {schema_name}.", # type: ignore[valid-type]
147
+ min_length=1, # help ensure non-empty output
148
+ ),
149
+ ),
144
150
  )
145
-
146
- try:
147
- llm_with_structured_output = create_extractor(self.llm, tools=[output_model])
148
- except NotImplementedError as exc:
149
- msg = f"{self.llm.__class__.__name__} does not support structured output."
150
- raise TypeError(msg) from exc
151
-
151
+ # Tracing config
152
152
  config_dict = {
153
153
  "run_name": self.display_name,
154
154
  "project_name": self.get_project_name(),
155
155
  "callbacks": self.get_langchain_callbacks(),
156
156
  }
157
- result = get_chat_result(
158
- runnable=llm_with_structured_output,
159
- system_message=self.system_prompt,
160
- input_value=self.input_value,
161
- config=config_dict,
162
- )
157
+ # Generate structured output using Trustcall first, then fallback to Langchain if it fails
158
+ result = self._extract_output_with_trustcall(output_model, config_dict)
159
+ if result is None:
160
+ result = self._extract_output_with_langchain(output_model, config_dict)
163
161
 
164
162
  # OPTIMIZATION NOTE: Simplified processing based on trustcall response structure
165
163
  # Handle non-dict responses (shouldn't happen with trustcall, but defensive)
@@ -173,8 +171,9 @@ class StructuredOutputComponent(Component):
173
171
 
174
172
  # Convert BaseModel to dict (creates the "objects" key)
175
173
  first_response = responses[0]
176
- structured_data = first_response.model_dump() if isinstance(first_response, BaseModel) else first_response
177
-
174
+ structured_data = first_response
175
+ if isinstance(first_response, BaseModel):
176
+ structured_data = first_response.model_dump()
178
177
  # Extract the objects array (guaranteed to exist due to our Pydantic model structure)
179
178
  return structured_data.get("objects", structured_data)
180
179
 
@@ -204,3 +203,42 @@ class StructuredOutputComponent(Component):
204
203
  # Multiple outputs - convert to DataFrame directly
205
204
  return DataFrame(output)
206
205
  return DataFrame()
206
+
207
+ def _extract_output_with_trustcall(self, schema: BaseModel, config_dict: dict) -> list[BaseModel] | None:
208
+ try:
209
+ llm_with_structured_output = create_extractor(self.llm, tools=[schema], tool_choice=schema.__name__)
210
+ result = get_chat_result(
211
+ runnable=llm_with_structured_output,
212
+ system_message=self.system_prompt,
213
+ input_value=self.input_value,
214
+ config=config_dict,
215
+ )
216
+ except Exception as e: # noqa: BLE001
217
+ logger.warning(
218
+ f"Trustcall extraction failed, falling back to Langchain: {e} "
219
+ "(Note: This may not be an error—some models or configurations do not support tool calling. "
220
+ "Falling back is normal in such cases.)"
221
+ )
222
+ return None
223
+ return result or None # langchain fallback is used if error occurs or the result is empty
224
+
225
+ def _extract_output_with_langchain(self, schema: BaseModel, config_dict: dict) -> list[BaseModel] | None:
226
+ try:
227
+ llm_with_structured_output = self.llm.with_structured_output(schema)
228
+ result = get_chat_result(
229
+ runnable=llm_with_structured_output,
230
+ system_message=self.system_prompt,
231
+ input_value=self.input_value,
232
+ config=config_dict,
233
+ )
234
+ if isinstance(result, BaseModel):
235
+ result = result.model_dump()
236
+ result = result.get("objects", result)
237
+ except Exception as fallback_error:
238
+ msg = (
239
+ f"Model does not support tool calling (trustcall failed) "
240
+ f"and fallback with_structured_output also failed: {fallback_error}"
241
+ )
242
+ raise ValueError(msg) from fallback_error
243
+
244
+ return result or None
@@ -1,5 +1,12 @@
1
+ """Logic module - backwards compatibility alias for flow_controls.
2
+
3
+ This module provides backwards compatibility by forwarding imports
4
+ to flow_controls where the actual logic components are located.
5
+ """
6
+
1
7
  from __future__ import annotations
2
8
 
9
+ import sys
3
10
  from typing import TYPE_CHECKING, Any
4
11
 
5
12
  from lfx.components._importing import import_mod
@@ -36,12 +43,131 @@ __all__ = [
36
43
  "SubFlowComponent",
37
44
  ]
38
45
 
46
+ # Register redirected submodules in sys.modules for direct importlib.import_module() calls
47
+ # This allows imports like: import lfx.components.logic.listen
48
+ _redirected_submodules = {
49
+ "lfx.components.logic.listen": "lfx.components.flow_controls.listen",
50
+ "lfx.components.logic.loop": "lfx.components.flow_controls.loop",
51
+ "lfx.components.logic.notify": "lfx.components.flow_controls.notify",
52
+ "lfx.components.logic.pass_message": "lfx.components.flow_controls.pass_message",
53
+ "lfx.components.logic.conditional_router": "lfx.components.flow_controls.conditional_router",
54
+ "lfx.components.logic.data_conditional_router": "lfx.components.flow_controls.data_conditional_router",
55
+ "lfx.components.logic.flow_tool": "lfx.components.flow_controls.flow_tool",
56
+ "lfx.components.logic.run_flow": "lfx.components.flow_controls.run_flow",
57
+ "lfx.components.logic.sub_flow": "lfx.components.flow_controls.sub_flow",
58
+ }
59
+
60
+ for old_path, new_path in _redirected_submodules.items():
61
+ if old_path not in sys.modules:
62
+ # Use a lazy loader that imports the actual module when accessed
63
+ class _RedirectedModule:
64
+ def __init__(self, target_path: str, original_path: str):
65
+ self._target_path = target_path
66
+ self._original_path = original_path
67
+ self._module = None
68
+
69
+ def __getattr__(self, name: str) -> Any:
70
+ if self._module is None:
71
+ from importlib import import_module
72
+
73
+ self._module = import_module(self._target_path)
74
+ # Also register under the original path for future imports
75
+ sys.modules[self._original_path] = self._module
76
+ return getattr(self._module, name)
77
+
78
+ def __repr__(self) -> str:
79
+ return f"<redirected module '{self._original_path}' -> '{self._target_path}'>"
80
+
81
+ sys.modules[old_path] = _RedirectedModule(new_path, old_path)
82
+
39
83
 
40
84
  def __getattr__(attr_name: str) -> Any:
41
85
  """Lazily import logic components on attribute access."""
86
+ # Handle submodule access for backwards compatibility
87
+ if attr_name == "listen":
88
+ from importlib import import_module
89
+
90
+ result = import_module("lfx.components.flow_controls.listen")
91
+ globals()[attr_name] = result
92
+ return result
93
+ if attr_name == "loop":
94
+ from importlib import import_module
95
+
96
+ result = import_module("lfx.components.flow_controls.loop")
97
+ globals()[attr_name] = result
98
+ return result
99
+ if attr_name == "notify":
100
+ from importlib import import_module
101
+
102
+ result = import_module("lfx.components.flow_controls.notify")
103
+ globals()[attr_name] = result
104
+ return result
105
+ if attr_name == "pass_message":
106
+ from importlib import import_module
107
+
108
+ result = import_module("lfx.components.flow_controls.pass_message")
109
+ globals()[attr_name] = result
110
+ return result
111
+ if attr_name == "conditional_router":
112
+ from importlib import import_module
113
+
114
+ result = import_module("lfx.components.flow_controls.conditional_router")
115
+ globals()[attr_name] = result
116
+ return result
117
+ if attr_name == "data_conditional_router":
118
+ from importlib import import_module
119
+
120
+ result = import_module("lfx.components.flow_controls.data_conditional_router")
121
+ globals()[attr_name] = result
122
+ return result
123
+ if attr_name == "flow_tool":
124
+ from importlib import import_module
125
+
126
+ result = import_module("lfx.components.flow_controls.flow_tool")
127
+ globals()[attr_name] = result
128
+ return result
129
+ if attr_name == "run_flow":
130
+ from importlib import import_module
131
+
132
+ result = import_module("lfx.components.flow_controls.run_flow")
133
+ globals()[attr_name] = result
134
+ return result
135
+ if attr_name == "sub_flow":
136
+ from importlib import import_module
137
+
138
+ result = import_module("lfx.components.flow_controls.sub_flow")
139
+ globals()[attr_name] = result
140
+ return result
141
+
42
142
  if attr_name not in _dynamic_imports:
43
143
  msg = f"module '{__name__}' has no attribute '{attr_name}'"
44
144
  raise AttributeError(msg)
145
+
146
+ # Most logic components were moved to flow_controls
147
+ # Forward them to flow_controls for backwards compatibility
148
+ if attr_name in (
149
+ "ConditionalRouterComponent",
150
+ "DataConditionalRouterComponent",
151
+ "FlowToolComponent",
152
+ "LoopComponent",
153
+ "PassMessageComponent",
154
+ "RunFlowComponent",
155
+ "SubFlowComponent",
156
+ ):
157
+ from lfx.components import flow_controls
158
+
159
+ result = getattr(flow_controls, attr_name)
160
+ globals()[attr_name] = result
161
+ return result
162
+
163
+ # SmartRouterComponent was moved to llm_operations
164
+ if attr_name == "SmartRouterComponent":
165
+ from lfx.components import llm_operations
166
+
167
+ result = getattr(llm_operations, attr_name)
168
+ globals()[attr_name] = result
169
+ return result
170
+
45
171
  try:
46
172
  result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent)
47
173
  except (ModuleNotFoundError, ImportError, AttributeError) as e:
@@ -7,6 +7,11 @@ from lfx.inputs.inputs import DictInput, HandleInput, MessageTextInput, NestedDi
7
7
  from lfx.io import Output
8
8
  from lfx.log.logger import logger
9
9
  from lfx.schema.data import Data
10
+ from lfx.utils.validate_cloud import raise_error_if_astra_cloud_disable_component
11
+
12
+ disable_component_in_astra_cloud_msg = (
13
+ "Mem0 chat memory is not supported in Astra cloud environment. Please use local storage mode or mem0 cloud."
14
+ )
10
15
 
11
16
 
12
17
  class Mem0MemoryComponent(LCChatMemoryComponent):
@@ -80,6 +85,8 @@ class Mem0MemoryComponent(LCChatMemoryComponent):
80
85
 
81
86
  def build_mem0(self) -> Memory:
82
87
  """Initializes a Mem0 memory instance based on provided configuration and API keys."""
88
+ # Check if we're in Astra cloud environment and raise an error if we are.
89
+ raise_error_if_astra_cloud_disable_component(disable_component_in_astra_cloud_msg)
83
90
  if self.openai_api_key:
84
91
  os.environ["OPENAI_API_KEY"] = self.openai_api_key
85
92
 
@@ -95,6 +102,8 @@ class Mem0MemoryComponent(LCChatMemoryComponent):
95
102
 
96
103
  def ingest_data(self) -> Memory:
97
104
  """Ingests a new message into Mem0 memory and returns the updated memory instance."""
105
+ # Check if we're in Astra cloud environment and raise an error if we are.
106
+ raise_error_if_astra_cloud_disable_component(disable_component_in_astra_cloud_msg)
98
107
  mem0_memory = self.existing_memory or self.build_mem0()
99
108
 
100
109
  if not self.ingest_message or not self.user_id:
@@ -115,6 +124,8 @@ class Mem0MemoryComponent(LCChatMemoryComponent):
115
124
 
116
125
  def build_search_results(self) -> Data:
117
126
  """Searches the Mem0 memory for related messages based on the search query and returns the results."""
127
+ # Check if we're in Astra cloud environment and raise an error if we are.
128
+ raise_error_if_astra_cloud_disable_component(disable_component_in_astra_cloud_msg)
118
129
  mem0_memory = self.ingest_data()
119
130
  search_query = self.search_query
120
131
  user_id = self.user_id
@@ -1,4 +1,4 @@
1
- from langchain_mistralai.embeddings import MistralAIEmbeddings
1
+ from langchain_mistralai import MistralAIEmbeddings
2
2
  from pydantic.v1 import SecretStr
3
3
 
4
4
  from lfx.base.models.model import LCModelComponent