lfx-nightly 0.2.0.dev0__py3-none-any.whl → 0.2.0.dev26__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (188)
  1. lfx/_assets/component_index.json +1 -1
  2. lfx/base/agents/agent.py +13 -1
  3. lfx/base/agents/altk_base_agent.py +380 -0
  4. lfx/base/agents/altk_tool_wrappers.py +565 -0
  5. lfx/base/agents/events.py +2 -1
  6. lfx/base/composio/composio_base.py +159 -224
  7. lfx/base/data/base_file.py +88 -21
  8. lfx/base/data/storage_utils.py +192 -0
  9. lfx/base/data/utils.py +178 -14
  10. lfx/base/embeddings/embeddings_class.py +113 -0
  11. lfx/base/models/groq_constants.py +74 -58
  12. lfx/base/models/groq_model_discovery.py +265 -0
  13. lfx/base/models/model.py +1 -1
  14. lfx/base/models/model_utils.py +100 -0
  15. lfx/base/models/openai_constants.py +7 -0
  16. lfx/base/models/watsonx_constants.py +32 -8
  17. lfx/base/tools/run_flow.py +601 -129
  18. lfx/cli/commands.py +6 -3
  19. lfx/cli/common.py +2 -2
  20. lfx/cli/run.py +1 -1
  21. lfx/cli/script_loader.py +53 -11
  22. lfx/components/Notion/create_page.py +1 -1
  23. lfx/components/Notion/list_database_properties.py +1 -1
  24. lfx/components/Notion/list_pages.py +1 -1
  25. lfx/components/Notion/list_users.py +1 -1
  26. lfx/components/Notion/page_content_viewer.py +1 -1
  27. lfx/components/Notion/search.py +1 -1
  28. lfx/components/Notion/update_page_property.py +1 -1
  29. lfx/components/__init__.py +19 -5
  30. lfx/components/{agents → altk}/__init__.py +5 -9
  31. lfx/components/altk/altk_agent.py +193 -0
  32. lfx/components/apify/apify_actor.py +1 -1
  33. lfx/components/composio/__init__.py +70 -18
  34. lfx/components/composio/apollo_composio.py +11 -0
  35. lfx/components/composio/bitbucket_composio.py +11 -0
  36. lfx/components/composio/canva_composio.py +11 -0
  37. lfx/components/composio/coda_composio.py +11 -0
  38. lfx/components/composio/composio_api.py +10 -0
  39. lfx/components/composio/discord_composio.py +1 -1
  40. lfx/components/composio/elevenlabs_composio.py +11 -0
  41. lfx/components/composio/exa_composio.py +11 -0
  42. lfx/components/composio/firecrawl_composio.py +11 -0
  43. lfx/components/composio/fireflies_composio.py +11 -0
  44. lfx/components/composio/gmail_composio.py +1 -1
  45. lfx/components/composio/googlebigquery_composio.py +11 -0
  46. lfx/components/composio/googlecalendar_composio.py +1 -1
  47. lfx/components/composio/googledocs_composio.py +1 -1
  48. lfx/components/composio/googlemeet_composio.py +1 -1
  49. lfx/components/composio/googlesheets_composio.py +1 -1
  50. lfx/components/composio/googletasks_composio.py +1 -1
  51. lfx/components/composio/heygen_composio.py +11 -0
  52. lfx/components/composio/mem0_composio.py +11 -0
  53. lfx/components/composio/peopledatalabs_composio.py +11 -0
  54. lfx/components/composio/perplexityai_composio.py +11 -0
  55. lfx/components/composio/serpapi_composio.py +11 -0
  56. lfx/components/composio/slack_composio.py +3 -574
  57. lfx/components/composio/slackbot_composio.py +1 -1
  58. lfx/components/composio/snowflake_composio.py +11 -0
  59. lfx/components/composio/tavily_composio.py +11 -0
  60. lfx/components/composio/youtube_composio.py +2 -2
  61. lfx/components/cuga/__init__.py +34 -0
  62. lfx/components/cuga/cuga_agent.py +730 -0
  63. lfx/components/data/__init__.py +78 -28
  64. lfx/components/data_source/__init__.py +58 -0
  65. lfx/components/{data → data_source}/api_request.py +26 -3
  66. lfx/components/{data → data_source}/csv_to_data.py +15 -10
  67. lfx/components/{data → data_source}/json_to_data.py +15 -8
  68. lfx/components/{data → data_source}/news_search.py +1 -1
  69. lfx/components/{data → data_source}/rss.py +1 -1
  70. lfx/components/{data → data_source}/sql_executor.py +1 -1
  71. lfx/components/{data → data_source}/url.py +1 -1
  72. lfx/components/{data → data_source}/web_search.py +1 -1
  73. lfx/components/datastax/astradb_cql.py +1 -1
  74. lfx/components/datastax/astradb_graph.py +1 -1
  75. lfx/components/datastax/astradb_tool.py +1 -1
  76. lfx/components/datastax/astradb_vectorstore.py +1 -1
  77. lfx/components/datastax/hcd.py +1 -1
  78. lfx/components/deactivated/json_document_builder.py +1 -1
  79. lfx/components/docling/__init__.py +0 -3
  80. lfx/components/elastic/elasticsearch.py +1 -1
  81. lfx/components/elastic/opensearch_multimodal.py +1575 -0
  82. lfx/components/files_and_knowledge/__init__.py +47 -0
  83. lfx/components/{data → files_and_knowledge}/directory.py +1 -1
  84. lfx/components/{data → files_and_knowledge}/file.py +246 -18
  85. lfx/components/{knowledge_bases → files_and_knowledge}/retrieval.py +2 -2
  86. lfx/components/{data → files_and_knowledge}/save_file.py +142 -22
  87. lfx/components/flow_controls/__init__.py +58 -0
  88. lfx/components/{logic → flow_controls}/conditional_router.py +1 -1
  89. lfx/components/{logic → flow_controls}/loop.py +43 -9
  90. lfx/components/flow_controls/run_flow.py +108 -0
  91. lfx/components/glean/glean_search_api.py +1 -1
  92. lfx/components/groq/groq.py +35 -28
  93. lfx/components/helpers/__init__.py +102 -0
  94. lfx/components/input_output/__init__.py +3 -1
  95. lfx/components/input_output/chat.py +4 -3
  96. lfx/components/input_output/chat_output.py +4 -4
  97. lfx/components/input_output/text.py +1 -1
  98. lfx/components/input_output/text_output.py +1 -1
  99. lfx/components/{data → input_output}/webhook.py +1 -1
  100. lfx/components/knowledge_bases/__init__.py +59 -4
  101. lfx/components/langchain_utilities/character.py +1 -1
  102. lfx/components/langchain_utilities/csv_agent.py +84 -16
  103. lfx/components/langchain_utilities/json_agent.py +67 -12
  104. lfx/components/langchain_utilities/language_recursive.py +1 -1
  105. lfx/components/llm_operations/__init__.py +46 -0
  106. lfx/components/{processing → llm_operations}/batch_run.py +1 -1
  107. lfx/components/{processing → llm_operations}/lambda_filter.py +1 -1
  108. lfx/components/{logic → llm_operations}/llm_conditional_router.py +1 -1
  109. lfx/components/{processing/llm_router.py → llm_operations/llm_selector.py} +3 -3
  110. lfx/components/{processing → llm_operations}/structured_output.py +1 -1
  111. lfx/components/logic/__init__.py +126 -0
  112. lfx/components/mem0/mem0_chat_memory.py +11 -0
  113. lfx/components/models/__init__.py +64 -9
  114. lfx/components/models_and_agents/__init__.py +49 -0
  115. lfx/components/{agents → models_and_agents}/agent.py +2 -2
  116. lfx/components/models_and_agents/embedding_model.py +423 -0
  117. lfx/components/models_and_agents/language_model.py +398 -0
  118. lfx/components/{agents → models_and_agents}/mcp_component.py +53 -44
  119. lfx/components/{helpers → models_and_agents}/memory.py +1 -1
  120. lfx/components/nvidia/system_assist.py +1 -1
  121. lfx/components/olivya/olivya.py +1 -1
  122. lfx/components/ollama/ollama.py +17 -3
  123. lfx/components/processing/__init__.py +9 -57
  124. lfx/components/processing/converter.py +1 -1
  125. lfx/components/processing/dataframe_operations.py +1 -1
  126. lfx/components/processing/parse_json_data.py +2 -2
  127. lfx/components/processing/parser.py +1 -1
  128. lfx/components/processing/split_text.py +1 -1
  129. lfx/components/qdrant/qdrant.py +1 -1
  130. lfx/components/redis/redis.py +1 -1
  131. lfx/components/twelvelabs/split_video.py +10 -0
  132. lfx/components/twelvelabs/video_file.py +12 -0
  133. lfx/components/utilities/__init__.py +43 -0
  134. lfx/components/{helpers → utilities}/calculator_core.py +1 -1
  135. lfx/components/{helpers → utilities}/current_date.py +1 -1
  136. lfx/components/{processing → utilities}/python_repl_core.py +1 -1
  137. lfx/components/vectorstores/local_db.py +9 -0
  138. lfx/components/youtube/youtube_transcripts.py +118 -30
  139. lfx/custom/custom_component/component.py +57 -1
  140. lfx/custom/custom_component/custom_component.py +68 -6
  141. lfx/graph/edge/base.py +43 -20
  142. lfx/graph/graph/base.py +4 -1
  143. lfx/graph/state/model.py +15 -2
  144. lfx/graph/utils.py +6 -0
  145. lfx/graph/vertex/base.py +4 -1
  146. lfx/graph/vertex/param_handler.py +10 -7
  147. lfx/helpers/__init__.py +12 -0
  148. lfx/helpers/flow.py +117 -0
  149. lfx/inputs/input_mixin.py +24 -1
  150. lfx/inputs/inputs.py +13 -1
  151. lfx/interface/components.py +161 -83
  152. lfx/log/logger.py +5 -3
  153. lfx/services/database/__init__.py +5 -0
  154. lfx/services/database/service.py +25 -0
  155. lfx/services/deps.py +87 -22
  156. lfx/services/manager.py +19 -6
  157. lfx/services/mcp_composer/service.py +998 -157
  158. lfx/services/session.py +5 -0
  159. lfx/services/settings/base.py +51 -7
  160. lfx/services/settings/constants.py +8 -0
  161. lfx/services/storage/local.py +76 -46
  162. lfx/services/storage/service.py +152 -29
  163. lfx/template/field/base.py +3 -0
  164. lfx/utils/ssrf_protection.py +384 -0
  165. lfx/utils/validate_cloud.py +26 -0
  166. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/METADATA +38 -22
  167. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/RECORD +182 -150
  168. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/WHEEL +1 -1
  169. lfx/components/agents/altk_agent.py +0 -366
  170. lfx/components/agents/cuga_agent.py +0 -1013
  171. lfx/components/docling/docling_remote_vlm.py +0 -284
  172. lfx/components/logic/run_flow.py +0 -71
  173. lfx/components/models/embedding_model.py +0 -195
  174. lfx/components/models/language_model.py +0 -144
  175. /lfx/components/{data → data_source}/mock_data.py +0 -0
  176. /lfx/components/{knowledge_bases → files_and_knowledge}/ingestion.py +0 -0
  177. /lfx/components/{logic → flow_controls}/data_conditional_router.py +0 -0
  178. /lfx/components/{logic → flow_controls}/flow_tool.py +0 -0
  179. /lfx/components/{logic → flow_controls}/listen.py +0 -0
  180. /lfx/components/{logic → flow_controls}/notify.py +0 -0
  181. /lfx/components/{logic → flow_controls}/pass_message.py +0 -0
  182. /lfx/components/{logic → flow_controls}/sub_flow.py +0 -0
  183. /lfx/components/{processing → models_and_agents}/prompt.py +0 -0
  184. /lfx/components/{helpers → processing}/create_list.py +0 -0
  185. /lfx/components/{helpers → processing}/output_parser.py +0 -0
  186. /lfx/components/{helpers → processing}/store_message.py +0 -0
  187. /lfx/components/{helpers → utilities}/id_generator.py +0 -0
  188. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/entry_points.txt +0 -0
lfx/cli/commands.py CHANGED
@@ -6,10 +6,12 @@ import json
 import os
 import sys
 import tempfile
+from functools import partial
 from pathlib import Path
 
 import typer
 import uvicorn
+from asyncer import syncify
 from dotenv import load_dotenv
 from rich.console import Console
 from rich.panel import Panel
@@ -32,7 +34,8 @@ console = Console()
 API_KEY_MASK_LENGTH = 8
 
 
-def serve_command(
+@partial(syncify, raise_sync_error=False)
+async def serve_command(
     script_path: str | None = typer.Argument(
         None,
         help=(
@@ -201,12 +204,12 @@ def serve_command(
         raise typer.Exit(1)
 
     if resolved_path.suffix == ".json":
-        graph = load_graph_from_path(resolved_path, resolved_path.suffix, verbose_print, verbose=verbose)
+        graph = await load_graph_from_path(resolved_path, resolved_path.suffix, verbose_print, verbose=verbose)
     elif resolved_path.suffix == ".py":
         verbose_print("Loading graph from Python script...")
         from lfx.cli.script_loader import load_graph_from_script
 
-        graph = load_graph_from_script(resolved_path)
+        graph = await load_graph_from_script(resolved_path)
         verbose_print("✓ Graph loaded from Python script")
     else:
         err_msg = "Error: Only JSON flow files (.json) or Python scripts (.py) are supported. "
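
The serve_command rewrite above uses asyncer's syncify to expose an async def through Typer's synchronous command interface. A minimal sketch of the same pattern, runnable on its own (the function name below is illustrative, not from the package):

from functools import partial

from asyncer import syncify


@partial(syncify, raise_sync_error=False)
async def fetch_status() -> str:  # illustrative name, not from lfx
    return "ok"


# Plain sync code (e.g. a Typer callback) can call the wrapper directly;
# with raise_sync_error=False, asyncer runs the coroutine on a fresh event
# loop when no loop is already running in the caller.
print(fetch_status())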
lfx/cli/common.py CHANGED
@@ -224,7 +224,7 @@ def validate_script_path(script_path: Path | str, verbose_print) -> tuple[str, P
     return file_extension, script_path
 
 
-def load_graph_from_path(script_path: Path, file_extension: str, verbose_print, *, verbose: bool = False):
+async def load_graph_from_path(script_path: Path, file_extension: str, verbose_print, *, verbose: bool = False):
     """Load a graph from a Python script or JSON file.
 
     Args:
@@ -259,7 +259,7 @@ def load_graph_from_path(script_path: Path, file_extension: str, verbose_print,
             raise ValueError(error_msg)
 
         verbose_print("Loading graph...")
-        graph = load_graph_from_script(script_path)
+        graph = await load_graph_from_script(script_path)
    else:  # .json
        verbose_print("Loading JSON flow...")
        graph = load_flow_from_json(script_path, disable_logs=not verbose)
lfx/cli/run.py CHANGED
@@ -227,7 +227,7 @@ async def run(
             typer.echo(f"Type: {graph_info['type']}", file=sys.stderr)
             typer.echo(f"Source: {graph_info['source_line']}", file=sys.stderr)
             typer.echo("Loading and executing script...", file=sys.stderr)
-        graph = load_graph_from_script(script_path)
+        graph = await load_graph_from_script(script_path)
     elif file_extension == ".json":
         if verbosity > 0:
             typer.echo("Valid JSON flow file detected", file=sys.stderr)
lfx/cli/script_loader.py CHANGED
@@ -6,6 +6,7 @@ containing LFX graph variables.
 
 import ast
 import importlib.util
+import inspect
 import json
 import sys
 from contextlib import contextmanager
@@ -71,26 +72,39 @@ def _validate_graph_instance(graph_obj: Any) -> Graph:
     return graph_obj
 
 
-def load_graph_from_script(script_path: Path) -> Graph:
-    """Load and execute a Python script to extract the 'graph' variable.
+async def load_graph_from_script(script_path: Path) -> Graph:
+    """Load and execute a Python script to extract the 'graph' variable or call 'get_graph' function.
 
     Args:
         script_path (Path): Path to the Python script file
 
     Returns:
-        dict: Information about the loaded graph variable including the graph object itself
+        Graph: The loaded and validated graph instance
     """
     try:
         # Load the module
         module = _load_module_from_script(script_path)
 
-        # Check if 'graph' variable exists
-        if not hasattr(module, "graph"):
-            msg = "No 'graph' variable found in the executed script"
+        graph_obj = None
+
+        # First, try to get graph from 'get_graph' function (preferred for async code)
+        if hasattr(module, "get_graph") and callable(module.get_graph):
+            get_graph_func = module.get_graph
+
+            # Check if get_graph is async and handle accordingly
+            if inspect.iscoroutinefunction(get_graph_func):
+                graph_obj = await get_graph_func()
+            else:
+                graph_obj = get_graph_func()
+
+        # Fallback to 'graph' variable for backward compatibility
+        elif hasattr(module, "graph"):
+            graph_obj = module.graph
+
+        if graph_obj is None:
+            msg = "No 'graph' variable or 'get_graph()' function found in the executed script"
             raise ValueError(msg)
 
-        # Extract graph information
-        graph_obj = module.graph
         return _validate_graph_instance(graph_obj)
 
     except (
@@ -178,13 +192,13 @@ def extract_structured_result(results: list, *, extract_text: bool = True) -> di
 
 
 def find_graph_variable(script_path: Path) -> dict | None:
-    """Parse a Python script and find the 'graph' variable assignment.
+    """Parse a Python script and find the 'graph' variable assignment or 'get_graph' function.
 
     Args:
         script_path (Path): Path to the Python script file
 
     Returns:
-        dict | None: Information about the graph variable if found, None otherwise
+        dict | None: Information about the graph variable or get_graph function if found, None otherwise
     """
     try:
         with script_path.open(encoding="utf-8") as f:
@@ -193,8 +207,36 @@ def find_graph_variable(script_path: Path) -> dict | None:
         # Parse the script using AST
         tree = ast.parse(content)
 
-        # Look for assignments to 'graph' variable
+        # Look for 'get_graph' function definitions (preferred) or 'graph' variable assignments
         for node in ast.walk(tree):
+            # Check for get_graph function definition
+            if isinstance(node, ast.FunctionDef) and node.name == "get_graph":
+                line_number = node.lineno
+                is_async = isinstance(node, ast.AsyncFunctionDef)
+
+                return {
+                    "line_number": line_number,
+                    "type": "function_definition",
+                    "function": "get_graph",
+                    "is_async": is_async,
+                    "arg_count": len(node.args.args),
+                    "source_line": content.split("\n")[line_number - 1].strip(),
+                }
+
+            # Check for async get_graph function definition
+            if isinstance(node, ast.AsyncFunctionDef) and node.name == "get_graph":
+                line_number = node.lineno
+
+                return {
+                    "line_number": line_number,
+                    "type": "function_definition",
+                    "function": "get_graph",
+                    "is_async": True,
+                    "arg_count": len(node.args.args),
+                    "source_line": content.split("\n")[line_number - 1].strip(),
+                }
+
+            # Fallback: look for assignments to 'graph' variable
             if isinstance(node, ast.Assign):
                 # Check if any target is named 'graph'
                 for target in node.targets:
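
The loader's new resolution order (an awaited async get_graph(), a plain get_graph() callable, then the legacy module-level graph variable) can be distilled into a small runnable sketch; the module objects and factory below are stand-ins, not lfx code:

import asyncio
import inspect
from types import SimpleNamespace


async def load_graph(module):
    """Distilled dispatch rule: prefer get_graph(), fall back to 'graph'."""
    fn = getattr(module, "get_graph", None)
    if callable(fn):
        # Await only when the factory is a coroutine function.
        return await fn() if inspect.iscoroutinefunction(fn) else fn()
    if hasattr(module, "graph"):
        return module.graph
    msg = "No 'graph' variable or 'get_graph()' function found"
    raise ValueError(msg)


async def build():  # stand-in async factory a flow script might define
    return "graph-from-get_graph"


print(asyncio.run(load_graph(SimpleNamespace(get_graph=build))))   # awaited factory
print(asyncio.run(load_graph(SimpleNamespace(graph="graph-var"))))  # legacy variable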
lfx/components/Notion/create_page.py CHANGED
@@ -14,7 +14,7 @@ from lfx.schema.data import Data
 class NotionPageCreator(LCToolComponent):
     display_name: str = "Create Page "
     description: str = "A component for creating Notion pages."
-    documentation: str = "https://docs.langflow.org/integrations/notion/page-create"
+    documentation: str = "https://docs.langflow.org/bundles-notion"
     icon = "NotionDirectoryLoader"
 
     inputs = [
lfx/components/Notion/list_database_properties.py CHANGED
@@ -12,7 +12,7 @@ from lfx.schema.data import Data
 class NotionDatabaseProperties(LCToolComponent):
     display_name: str = "List Database Properties "
     description: str = "Retrieve properties of a Notion database."
-    documentation: str = "https://docs.langflow.org/integrations/notion/list-database-properties"
+    documentation: str = "https://docs.langflow.org/bundles-notion"
     icon = "NotionDirectoryLoader"
 
     inputs = [
lfx/components/Notion/list_pages.py CHANGED
@@ -21,7 +21,7 @@ class NotionListPages(LCToolComponent):
         '{"filter": {"property": "Status", "select": {"equals": "Done"}}, '
         '"sorts": [{"timestamp": "created_time", "direction": "descending"}]}'
     )
-    documentation: str = "https://docs.langflow.org/integrations/notion/list-pages"
+    documentation: str = "https://docs.langflow.org/bundles-notion"
     icon = "NotionDirectoryLoader"
 
     inputs = [
lfx/components/Notion/list_users.py CHANGED
@@ -11,7 +11,7 @@ from lfx.schema.data import Data
 class NotionUserList(LCToolComponent):
     display_name = "List Users "
     description = "Retrieve users from Notion."
-    documentation = "https://docs.langflow.org/integrations/notion/list-users"
+    documentation = "https://docs.langflow.org/bundles-notion"
     icon = "NotionDirectoryLoader"
 
     inputs = [
lfx/components/Notion/page_content_viewer.py CHANGED
@@ -12,7 +12,7 @@ from lfx.schema.data import Data
 class NotionPageContent(LCToolComponent):
     display_name = "Page Content Viewer "
     description = "Retrieve the content of a Notion page as plain text."
-    documentation = "https://docs.langflow.org/integrations/notion/page-content-viewer"
+    documentation = "https://docs.langflow.org/bundles-notion"
     icon = "NotionDirectoryLoader"
 
     inputs = [
lfx/components/Notion/search.py CHANGED
@@ -13,7 +13,7 @@ from lfx.schema.data import Data
 class NotionSearch(LCToolComponent):
     display_name: str = "Search "
     description: str = "Searches all pages and databases that have been shared with an integration."
-    documentation: str = "https://docs.langflow.org/integrations/notion/search"
+    documentation: str = "https://docs.langflow.org/bundles-notion"
     icon = "NotionDirectoryLoader"
 
     inputs = [
lfx/components/Notion/update_page_property.py CHANGED
@@ -15,7 +15,7 @@ from lfx.schema.data import Data
 class NotionPageUpdate(LCToolComponent):
     display_name: str = "Update Page Property "
     description: str = "Update the properties of a Notion page."
-    documentation: str = "https://docs.langflow.org/integrations/notion/page-update"
+    documentation: str = "https://docs.langflow.org/bundles-notion"
     icon = "NotionDirectoryLoader"
 
     inputs = [
lfx/components/__init__.py CHANGED
@@ -10,8 +10,8 @@ if TYPE_CHECKING:
         FAISS,
         Notion,
         agentql,
-        agents,
         aiml,
+        altk,
         amazon,
         anthropic,
         apify,
@@ -31,6 +31,7 @@ if TYPE_CHECKING:
         confluence,
         couchbase,
         crewai,
+        cuga,
         custom_component,
         data,
         datastax,
@@ -64,6 +65,7 @@ if TYPE_CHECKING:
         milvus,
         mistral,
         models,
+        models_and_agents,
         mongodb,
         needle,
         notdiamond,
@@ -111,8 +113,8 @@ if TYPE_CHECKING:
 _dynamic_imports = {
     # Category modules (existing functionality)
     "agentql": "__module__",
-    "agents": "__module__",
     "aiml": "__module__",
+    "altk": "__module__",
     "amazon": "__module__",
     "anthropic": "__module__",
     "apify": "__module__",
@@ -132,6 +134,7 @@ _dynamic_imports = {
     "confluence": "__module__",
     "couchbase": "__module__",
     "crewai": "__module__",
+    "cuga": "__module__",
     "custom_component": "__module__",
     "data": "__module__",
     "datastax": "__module__",
@@ -166,6 +169,7 @@ _dynamic_imports = {
     "milvus": "__module__",
     "mistral": "__module__",
     "models": "__module__",
+    "models_and_agents": "__module__",
     "mongodb": "__module__",
     "needle": "__module__",
     "notdiamond": "__module__",
@@ -242,8 +246,8 @@ __all__ = [
     "FAISS",
     "Notion",
     "agentql",
-    "agents",
     "aiml",
+    "altk",
     "amazon",
     "anthropic",
     "apify",
@@ -263,6 +267,7 @@ __all__ = [
     "confluence",
     "couchbase",
     "crewai",
+    "cuga",
     "custom_component",
     "data",
     "datastax",
@@ -296,6 +301,7 @@ __all__ = [
     "milvus",
     "mistral",
     "models",
+    "models_and_agents",
     "mongodb",
     "needle",
     "notdiamond",
@@ -382,8 +388,16 @@ def __getattr__(attr_name: str) -> Any:
     elif "." in module_path:
         # This is a component import (e.g., components.AgentComponent -> agents.agent)
         module_name, component_file = module_path.split(".", 1)
-        # Import the specific component from its module
-        result = import_mod(attr_name, component_file, f"{__spec__.parent}.{module_name}")
+        # Check if this is an alias module (data, helpers, logic, models)
+        # These modules forward to other modules, so we need to import directly from the module
+        # instead of trying to import from a submodule that doesn't exist
+        if module_name in ("data", "helpers", "logic", "models"):
+            # For alias modules, import the module and get the component directly
+            alias_module = import_mod(module_name, "__module__", __spec__.parent)
+            result = getattr(alias_module, attr_name)
+        else:
+            # Import the specific component from its module
+            result = import_mod(attr_name, component_file, f"{__spec__.parent}.{module_name}")
     else:
         # Fallback to regular import
         result = import_mod(attr_name, module_path, __spec__.parent)
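
The new branch in __getattr__ above distinguishes alias categories (data, helpers, logic, models) that merely re-export components from their new homes. A generic sketch of this PEP 562 lazy-alias pattern for a package __init__.py, with illustrative package and component names rather than lfx's real layout:

# pkg/__init__.py -- generic lazy-attribute sketch, not lfx's actual code
from importlib import import_module
from typing import Any

_ALIAS_MODULES = {"helpers"}  # forward-only namespaces that re-export
_dynamic_imports = {
    "IdGenerator": "helpers.id_generator",  # moved; helpers re-exports it
    "CsvLoader": "loaders.csv_loader",      # lives in a real submodule file
}


def __getattr__(attr_name: str) -> Any:
    if attr_name not in _dynamic_imports:
        raise AttributeError(f"module {__name__!r} has no attribute {attr_name!r}")
    module_name, _, component_file = _dynamic_imports[attr_name].partition(".")
    if module_name in _ALIAS_MODULES:
        # Alias packages re-export from elsewhere; resolve on the package itself.
        return getattr(import_module(f".{module_name}", __name__), attr_name)
    # Regular case: import the concrete submodule and pull the attribute.
    return getattr(import_module(f".{module_name}.{component_file}", __name__), attr_name)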
lfx/components/{agents → altk}/__init__.py CHANGED
@@ -5,23 +5,19 @@ from typing import TYPE_CHECKING, Any
 from lfx.components._importing import import_mod
 
 if TYPE_CHECKING:
-    from lfx.components.agents.agent import AgentComponent
-    from lfx.components.agents.altk_agent import ALTKAgentComponent
-    from lfx.components.agents.cuga_agent import CugaComponent
-    from lfx.components.agents.mcp_component import MCPToolsComponent
+    from .altk_agent import ALTKAgentComponent
 
 _dynamic_imports = {
-    "AgentComponent": "agent",
-    "CugaComponent": "cuga_agent",
-    "MCPToolsComponent": "mcp_component",
     "ALTKAgentComponent": "altk_agent",
 }
 
-__all__ = ["ALTKAgentComponent", "AgentComponent", "CugaComponent", "MCPToolsComponent"]
+__all__ = [
+    "ALTKAgentComponent",
+]
 
 
 def __getattr__(attr_name: str) -> Any:
-    """Lazily import agent components on attribute access."""
+    """Lazily import altk components on attribute access."""
     if attr_name not in _dynamic_imports:
         msg = f"module '{__name__}' has no attribute '{attr_name}'"
         raise AttributeError(msg)
lfx/components/altk/altk_agent.py ADDED
@@ -0,0 +1,193 @@
+"""ALTK Agent Component that combines pre-tool validation and post-tool processing capabilities."""
+
+from lfx.base.agents.altk_base_agent import ALTKBaseAgentComponent
+from lfx.base.agents.altk_tool_wrappers import (
+    PostToolProcessingWrapper,
+    PreToolValidationWrapper,
+)
+from lfx.base.models.model_input_constants import MODEL_PROVIDERS_DICT, MODELS_METADATA
+from lfx.components.models_and_agents.memory import MemoryComponent
+from lfx.inputs.inputs import BoolInput
+from lfx.io import DropdownInput, IntInput, Output
+from lfx.log.logger import logger
+
+
+def set_advanced_true(component_input):
+    """Set the advanced flag to True for a component input."""
+    component_input.advanced = True
+    return component_input
+
+
+MODEL_PROVIDERS_LIST = ["Anthropic", "OpenAI"]
+INPUT_NAMES_TO_BE_OVERRIDDEN = ["agent_llm"]
+
+
+def get_parent_agent_inputs():
+    return [
+        input_field
+        for input_field in ALTKBaseAgentComponent.inputs
+        if input_field.name not in INPUT_NAMES_TO_BE_OVERRIDDEN
+    ]
+
+
+# === Combined ALTK Agent Component ===
+
+
+class ALTKAgentComponent(ALTKBaseAgentComponent):
+    """ALTK Agent with both pre-tool validation and post-tool processing capabilities.
+
+    This agent combines the functionality of both ALTKAgent and AgentReflection components,
+    implementing a modular pipeline for tool processing that can be extended with
+    additional capabilities in the future.
+    """
+
+    display_name: str = "ALTK Agent"
+    description: str = "Advanced agent with both pre-tool validation and post-tool processing capabilities."
+    documentation: str = "https://docs.langflow.org/bundles-altk"
+    icon = "zap"
+    beta = True
+    name = "ALTK Agent"
+
+    memory_inputs = [set_advanced_true(component_input) for component_input in MemoryComponent().inputs]
+
+    # Filter out json_mode from OpenAI inputs since we handle structured output differently
+    if "OpenAI" in MODEL_PROVIDERS_DICT:
+        openai_inputs_filtered = [
+            input_field
+            for input_field in MODEL_PROVIDERS_DICT["OpenAI"]["inputs"]
+            if not (hasattr(input_field, "name") and input_field.name == "json_mode")
+        ]
+    else:
+        openai_inputs_filtered = []
+
+    inputs = [
+        DropdownInput(
+            name="agent_llm",
+            display_name="Model Provider",
+            info="The provider of the language model that the agent will use to generate responses.",
+            options=[*MODEL_PROVIDERS_LIST],
+            value="OpenAI",
+            real_time_refresh=True,
+            refresh_button=False,
+            input_types=[],
+            options_metadata=[MODELS_METADATA[key] for key in MODEL_PROVIDERS_LIST if key in MODELS_METADATA],
+        ),
+        *get_parent_agent_inputs(),
+        BoolInput(
+            name="enable_tool_validation",
+            display_name="Tool Validation",
+            info="Validates tool calls using SPARC before execution.",
+            value=True,
+        ),
+        BoolInput(
+            name="enable_post_tool_reflection",
+            display_name="Post Tool JSON Processing",
+            info="Processes tool output through JSON analysis.",
+            value=True,
+        ),
+        IntInput(
+            name="response_processing_size_threshold",
+            display_name="Response Processing Size Threshold",
+            value=100,
+            info="Tool output is post-processed only if response exceeds this character threshold.",
+            advanced=True,
+        ),
+    ]
+    outputs = [
+        Output(name="response", display_name="Response", method="message_response"),
+    ]
+
+    def configure_tool_pipeline(self) -> None:
+        """Configure the tool pipeline with wrappers based on enabled features."""
+        wrappers = []
+
+        # Add post-tool processing first (innermost wrapper)
+        if self.enable_post_tool_reflection:
+            logger.info("Enabling Post-Tool Processing Wrapper!")
+            post_processor = PostToolProcessingWrapper(
+                response_processing_size_threshold=self.response_processing_size_threshold
+            )
+            wrappers.append(post_processor)
+
+        # Add pre-tool validation last (outermost wrapper)
+        if self.enable_tool_validation:
+            logger.info("Enabling Pre-Tool Validation Wrapper!")
+            pre_validator = PreToolValidationWrapper()
+            wrappers.append(pre_validator)
+
+        self.pipeline_manager.configure_wrappers(wrappers)
+
+    def update_runnable_instance(self, agent, runnable, tools):
+        """Override to add tool specs update for validation wrappers."""
+        # Get context info (copied from parent)
+        user_query = self.get_user_query()
+        conversation_context = self.build_conversation_context()
+
+        # Initialize pipeline (this ensures configure_tool_pipeline is called)
+        self._initialize_tool_pipeline()
+
+        # Update tool specs for validation wrappers BEFORE processing
+        for wrapper in self.pipeline_manager.wrappers:
+            if isinstance(wrapper, PreToolValidationWrapper) and tools:
+                wrapper.tool_specs = wrapper.convert_langchain_tools_to_sparc_tool_specs_format(tools)
+
+        # Process tools with updated specs
+        processed_tools = self.pipeline_manager.process_tools(
+            list(tools or []),
+            agent=agent,
+            user_query=user_query,
+            conversation_context=conversation_context,
+        )
+
+        runnable.tools = processed_tools
+        return runnable
+
+    def __init__(self, **kwargs):
+        """Initialize ALTK agent with input normalization for Data.to_lc_message() inconsistencies."""
+        super().__init__(**kwargs)
+
+        # If input_value uses Data.to_lc_message(), wrap it to provide consistent content
+        if hasattr(self.input_value, "to_lc_message") and callable(self.input_value.to_lc_message):
+            self.input_value = self._create_normalized_input_proxy(self.input_value)
+
+    def _create_normalized_input_proxy(self, original_input):
+        """Create a proxy that normalizes to_lc_message() content format."""
+
+        class NormalizedInputProxy:
+            def __init__(self, original):
+                self._original = original
+
+            def __getattr__(self, name):
+                if name == "to_lc_message":
+                    return self._normalized_to_lc_message
+                return getattr(self._original, name)
+
+            def _normalized_to_lc_message(self):
+                """Return a message with normalized string content."""
+                original_msg = self._original.to_lc_message()
+
+                # If content is in list format, normalize it to string
+                if hasattr(original_msg, "content") and isinstance(original_msg.content, list):
+                    from langchain_core.messages import AIMessage, HumanMessage
+
+                    from lfx.base.agents.altk_base_agent import (
+                        normalize_message_content,
+                    )
+
+                    normalized_content = normalize_message_content(original_msg)
+
+                    # Create new message with string content
+                    if isinstance(original_msg, HumanMessage):
+                        return HumanMessage(content=normalized_content)
+                    return AIMessage(content=normalized_content)
+
+                # Return original if already string format
+                return original_msg
+
+            def __str__(self):
+                return str(self._original)
+
+            def __repr__(self):
+                return f"NormalizedInputProxy({self._original!r})"
+
+        return NormalizedInputProxy(original_input)
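
In configure_tool_pipeline above, list order determines nesting: the post-processing wrapper is appended first and ends up innermost, the validation wrapper last and outermost. A toy illustration of that fold, using hypothetical stand-in wrappers rather than the ALTK classes:

from collections.abc import Callable


def wrap_tool(tool: Callable[[str], str], wrappers: list) -> Callable[[str], str]:
    # Fold the wrapper list around the tool: earlier entries end up
    # innermost, later entries outermost.
    wrapped = tool
    for wrapper in wrappers:
        wrapped = wrapper(wrapped)
    return wrapped


def post_process(inner):  # appended first -> innermost, transforms output
    return lambda arg: f"post({inner(arg)})"


def pre_validate(inner):  # appended last -> outermost, checks input first
    return lambda arg: inner(f"validated({arg})")


tool = wrap_tool(lambda arg: f"tool({arg})", [post_process, pre_validate])
print(tool("x"))  # -> post(tool(validated(x)))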
lfx/components/apify/apify_actor.py CHANGED
@@ -22,7 +22,7 @@ class ApifyActorsComponent(Component):
         "Use Apify Actors to extract data from hundreds of places fast. "
         "This component can be used in a flow to retrieve data or as a tool with an agent."
     )
-    documentation: str = "http://docs.langflow.org/integrations-apify"
+    documentation: str = "https://docs.langflow.org/bundles-apify"
     icon = "Apify"
     name = "ApifyActors"