lfx-nightly 0.2.0.dev41__py3-none-any.whl → 0.3.0.dev3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98)
  1. lfx/__main__.py +137 -6
  2. lfx/_assets/component_index.json +1 -1
  3. lfx/base/agents/agent.py +10 -6
  4. lfx/base/agents/altk_base_agent.py +5 -3
  5. lfx/base/agents/altk_tool_wrappers.py +1 -1
  6. lfx/base/agents/events.py +1 -1
  7. lfx/base/agents/utils.py +4 -0
  8. lfx/base/composio/composio_base.py +78 -41
  9. lfx/base/data/cloud_storage_utils.py +156 -0
  10. lfx/base/data/docling_utils.py +130 -55
  11. lfx/base/datastax/astradb_base.py +75 -64
  12. lfx/base/embeddings/embeddings_class.py +113 -0
  13. lfx/base/models/__init__.py +11 -1
  14. lfx/base/models/google_generative_ai_constants.py +33 -9
  15. lfx/base/models/model_metadata.py +6 -0
  16. lfx/base/models/ollama_constants.py +196 -30
  17. lfx/base/models/openai_constants.py +37 -10
  18. lfx/base/models/unified_models.py +1123 -0
  19. lfx/base/models/watsonx_constants.py +43 -4
  20. lfx/base/prompts/api_utils.py +40 -5
  21. lfx/base/tools/component_tool.py +2 -9
  22. lfx/cli/__init__.py +10 -2
  23. lfx/cli/commands.py +3 -0
  24. lfx/cli/run.py +65 -409
  25. lfx/cli/script_loader.py +18 -7
  26. lfx/cli/validation.py +6 -3
  27. lfx/components/__init__.py +0 -3
  28. lfx/components/composio/github_composio.py +1 -1
  29. lfx/components/cuga/cuga_agent.py +39 -27
  30. lfx/components/data_source/api_request.py +4 -2
  31. lfx/components/datastax/astradb_assistant_manager.py +4 -2
  32. lfx/components/docling/__init__.py +45 -11
  33. lfx/components/docling/docling_inline.py +39 -49
  34. lfx/components/docling/docling_remote.py +1 -0
  35. lfx/components/elastic/opensearch_multimodal.py +1733 -0
  36. lfx/components/files_and_knowledge/file.py +384 -36
  37. lfx/components/files_and_knowledge/ingestion.py +8 -0
  38. lfx/components/files_and_knowledge/retrieval.py +10 -0
  39. lfx/components/files_and_knowledge/save_file.py +91 -88
  40. lfx/components/langchain_utilities/ibm_granite_handler.py +211 -0
  41. lfx/components/langchain_utilities/tool_calling.py +37 -6
  42. lfx/components/llm_operations/batch_run.py +64 -18
  43. lfx/components/llm_operations/lambda_filter.py +213 -101
  44. lfx/components/llm_operations/llm_conditional_router.py +39 -7
  45. lfx/components/llm_operations/structured_output.py +38 -12
  46. lfx/components/models/__init__.py +16 -74
  47. lfx/components/models_and_agents/agent.py +51 -203
  48. lfx/components/models_and_agents/embedding_model.py +171 -255
  49. lfx/components/models_and_agents/language_model.py +54 -318
  50. lfx/components/models_and_agents/mcp_component.py +96 -10
  51. lfx/components/models_and_agents/prompt.py +105 -18
  52. lfx/components/ollama/ollama_embeddings.py +111 -29
  53. lfx/components/openai/openai_chat_model.py +1 -1
  54. lfx/components/processing/text_operations.py +580 -0
  55. lfx/components/vllm/__init__.py +37 -0
  56. lfx/components/vllm/vllm.py +141 -0
  57. lfx/components/vllm/vllm_embeddings.py +110 -0
  58. lfx/custom/custom_component/component.py +65 -10
  59. lfx/custom/custom_component/custom_component.py +8 -6
  60. lfx/events/observability/__init__.py +0 -0
  61. lfx/events/observability/lifecycle_events.py +111 -0
  62. lfx/field_typing/__init__.py +57 -58
  63. lfx/graph/graph/base.py +40 -1
  64. lfx/graph/utils.py +109 -30
  65. lfx/graph/vertex/base.py +75 -23
  66. lfx/graph/vertex/vertex_types.py +0 -5
  67. lfx/inputs/__init__.py +2 -0
  68. lfx/inputs/input_mixin.py +55 -0
  69. lfx/inputs/inputs.py +120 -0
  70. lfx/interface/components.py +24 -7
  71. lfx/interface/initialize/loading.py +42 -12
  72. lfx/io/__init__.py +2 -0
  73. lfx/run/__init__.py +5 -0
  74. lfx/run/base.py +464 -0
  75. lfx/schema/__init__.py +50 -0
  76. lfx/schema/data.py +1 -1
  77. lfx/schema/image.py +26 -7
  78. lfx/schema/message.py +104 -11
  79. lfx/schema/workflow.py +171 -0
  80. lfx/services/deps.py +12 -0
  81. lfx/services/interfaces.py +43 -1
  82. lfx/services/mcp_composer/service.py +7 -1
  83. lfx/services/schema.py +1 -0
  84. lfx/services/settings/auth.py +95 -4
  85. lfx/services/settings/base.py +11 -1
  86. lfx/services/settings/constants.py +2 -0
  87. lfx/services/settings/utils.py +82 -0
  88. lfx/services/storage/local.py +13 -8
  89. lfx/services/transaction/__init__.py +5 -0
  90. lfx/services/transaction/service.py +35 -0
  91. lfx/tests/unit/components/__init__.py +0 -0
  92. lfx/utils/constants.py +2 -0
  93. lfx/utils/mustache_security.py +79 -0
  94. lfx/utils/validate_cloud.py +81 -3
  95. {lfx_nightly-0.2.0.dev41.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/METADATA +7 -2
  96. {lfx_nightly-0.2.0.dev41.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/RECORD +98 -80
  97. {lfx_nightly-0.2.0.dev41.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/WHEEL +0 -0
  98. {lfx_nightly-0.2.0.dev41.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/entry_points.txt +0 -0
lfx/graph/graph/base.py CHANGED
@@ -15,6 +15,9 @@ from functools import partial
  from itertools import chain
  from typing import TYPE_CHECKING, Any, cast

+ from ag_ui.core import RunFinishedEvent, RunStartedEvent
+
+ from lfx.events.observability.lifecycle_events import observable
  from lfx.exceptions.component import ComponentBuildError
  from lfx.graph.edge.base import CycleEdge, Edge
  from lfx.graph.graph.constants import Finish, lazy_load_vertex_dict
@@ -728,6 +731,7 @@ class Graph:
  raise ValueError(msg)
  vertex.update_raw_params(inputs, overwrite=True)

+ @observable
  async def _run(
  self,
  *,
@@ -1519,7 +1523,10 @@ class Graph:
  try:
  params = ""
  should_build = False
- if not vertex.frozen:
+ # Loop components must always build, even when frozen,
+ # because they need to iterate through their data
+ is_loop_component = vertex.display_name == "Loop" or vertex.is_loop
+ if not vertex.frozen or is_loop_component:
  should_build = True
  else:
  # Check the cache for the vertex
@@ -1546,8 +1553,10 @@ class Graph:
  vertex.result.used_frozen_result = True
  except Exception: # noqa: BLE001
  logger.debug("Error finalizing build", exc_info=True)
+ vertex.built = False
  should_build = True
  except KeyError:
+ vertex.built = False
  should_build = True

  if should_build:
@@ -2129,6 +2138,17 @@ class Graph:
  """Get all vertex IDs in the graph."""
  return [vertex.id for vertex in self.vertices]

+ def get_terminal_nodes(self) -> list[str]:
+ """Returns vertex IDs that are terminal nodes (not source of any edge).
+
+ Terminal nodes are vertices that have no outgoing edges - they are not
+ listed as source_id in any of the graph's edges.
+
+ Returns:
+ list[str]: List of vertex IDs that are terminal nodes.
+ """
+ return [vertex.id for vertex in self.vertices if not self.successor_map.get(vertex.id, [])]
+
  def sort_vertices(
  self,
  stop_component_id: str | None = None,
@@ -2293,3 +2313,22 @@ class Graph:
  predecessors = [i.id for i in self.get_predecessors(vertex)]
  result |= {vertex_id: {"successors": sucessors, "predecessors": predecessors}}
  return result
+
+ def raw_event_metrics(self, optional_fields: dict | None = None) -> dict:
+ if optional_fields is None:
+ optional_fields = {}
+ import time
+
+ return {"timestamp": time.time(), **optional_fields}
+
+ def before_callback_event(self, *args, **kwargs) -> RunStartedEvent: # noqa: ARG002
+ metrics = {}
+ if hasattr(self, "raw_event_metrics"):
+ metrics = self.raw_event_metrics({"total_components": len(self.vertices)})
+ return RunStartedEvent(run_id=self._run_id, thread_id=self.flow_id, raw_event=metrics)
+
+ def after_callback_event(self, result: Any = None, *args, **kwargs) -> RunFinishedEvent: # noqa: ARG002
+ metrics = {}
+ if hasattr(self, "raw_event_metrics"):
+ metrics = self.raw_event_metrics({"total_components": len(self.vertices)})
+ return RunFinishedEvent(run_id=self._run_id, thread_id=self.flow_id, result=None, raw_event=metrics)
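Note: the new Graph.get_terminal_nodes helper shown above just filters self.successor_map for vertices that have no outgoing edges. A minimal standalone sketch of that rule, using invented vertex IDs purely for illustration:

# Illustrative sketch only: the same terminal-node rule applied to a toy
# successor map (vertex_id -> list of successor vertex_ids). The vertex IDs
# here are made up for the example.
successor_map: dict[str, list[str]] = {
    "ChatInput-1": ["Prompt-1"],
    "Prompt-1": ["ChatOutput-1"],
    "ChatOutput-1": [],  # no outgoing edges, so it is terminal
}

def terminal_nodes(successors: dict[str, list[str]]) -> list[str]:
    # A vertex is terminal when it never appears as the source of an edge.
    return [vertex_id for vertex_id, targets in successors.items() if not targets]

print(terminal_nodes(successor_map))  # ['ChatOutput-1']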
lfx/graph/utils.py CHANGED
@@ -11,7 +11,7 @@ from lfx.schema.data import Data
  from lfx.schema.message import Message

  # Database imports removed - lfx should be lightweight
- from lfx.services.deps import get_db_service, get_settings_service
+ from lfx.services.deps import get_settings_service

  if TYPE_CHECKING:
  from lfx.graph.vertex.base import Vertex
@@ -108,32 +108,65 @@ def _vertex_to_primitive_dict(target: Vertex) -> dict:
  async def log_transaction(
  flow_id: str | UUID,
  source: Vertex,
- status,
- target: Vertex | None = None, # noqa: ARG001
- error=None, # noqa: ARG001
+ status: str,
+ target: Vertex | None = None,
+ error: str | Exception | None = None,
+ outputs: dict[str, Any] | None = None,
  ) -> None:
  """Asynchronously logs a transaction record for a vertex in a flow if transaction storage is enabled.

- This is a lightweight implementation that only logs if database service is available.
+ Uses the pluggable TransactionService to log transactions. When running within langflow,
+ the concrete TransactionService implementation persists to the database.
+ When running standalone (lfx only), transactions are not persisted.
+
+ Args:
+ flow_id: The flow ID
+ source: The source vertex (component being executed)
+ status: Transaction status (success/error)
+ target: Optional target vertex (for data transfer logging)
+ error: Optional error information
+ outputs: Optional explicit outputs dict (component execution results)
  """
  try:
- settings_service = get_settings_service()
- if not settings_service or not getattr(settings_service.settings, "transactions_storage_enabled", False):
+ # Guard against null source
+ if source is None:
  return

- db_service = get_db_service()
- if db_service is None:
- logger.debug("Database service not available, skipping transaction logging")
+ # Get the transaction service via dependency injection
+ from lfx.services.deps import get_transaction_service
+
+ transaction_service = get_transaction_service()
+
+ # If no transaction service is available or it's disabled, skip logging
+ if transaction_service is None or not transaction_service.is_enabled():
  return

+ # Resolve flow_id
  if not flow_id:
  if source.graph.flow_id:
  flow_id = source.graph.flow_id
  else:
  return

- # Log basic transaction info - concrete implementation should be in langflow
- logger.debug(f"Transaction logged: vertex={source.id}, flow={flow_id}, status={status}")
+ # Convert UUID to string for the service interface
+ flow_id_str = str(flow_id) if isinstance(flow_id, UUID) else flow_id
+
+ # Prepare inputs and outputs
+ inputs = _vertex_to_primitive_dict(source) if source else None
+ target_outputs = _vertex_to_primitive_dict(target) if target else None
+ transaction_outputs = outputs if outputs is not None else target_outputs
+
+ # Log transaction via the service
+ await transaction_service.log_transaction(
+ flow_id=flow_id_str,
+ vertex_id=source.id,
+ inputs=inputs,
+ outputs=transaction_outputs,
+ status=status,
+ target_id=target.id if target else None,
+ error=str(error) if error else None,
+ )
+
  except Exception as exc: # noqa: BLE001
  logger.debug(f"Error logging transaction: {exc!s}")

@@ -143,35 +176,81 @@ async def log_vertex_build(
  flow_id: str | UUID,
  vertex_id: str,
  valid: bool,
- params: Any, # noqa: ARG001
- data: dict | Any, # noqa: ARG001
- artifacts: dict | None = None, # noqa: ARG001
+ params: Any,
+ data: dict | Any,
+ artifacts: dict | None = None,
  ) -> None:
  """Asynchronously logs a vertex build record if vertex build storage is enabled.

  This is a lightweight implementation that only logs if database service is available.
+ When running within langflow, it will use langflow's database service to persist the build.
+ When running standalone (lfx only), it will only log debug messages.
  """
  try:
- settings_service = get_settings_service()
- if not settings_service or not getattr(settings_service.settings, "vertex_builds_storage_enabled", False):
- return
+ # Try to use langflow's services if available (when running within langflow)
+ try:
+ from langflow.services.deps import get_db_service as langflow_get_db_service
+ from langflow.services.deps import get_settings_service as langflow_get_settings_service

- db_service = get_db_service()
- if db_service is None:
- logger.debug("Database service not available, skipping vertex build logging")
- return
+ settings_service = langflow_get_settings_service()
+ if not settings_service:
+ return
+ if not getattr(settings_service.settings, "vertex_builds_storage_enabled", False):
+ return

- try:
  if isinstance(flow_id, str):
  flow_id = UUID(flow_id)
- except ValueError:
- logger.debug(f"Invalid flow_id passed to log_vertex_build: {flow_id!r}")
- return

- # Log basic vertex build info - concrete implementation should be in langflow
- logger.debug(f"Vertex build logged: vertex={vertex_id}, flow={flow_id}, valid={valid}")
- except Exception: # noqa: BLE001
- logger.debug("Error logging vertex build")
+ from langflow.services.database.models.vertex_builds.crud import (
+ log_vertex_build as crud_log_vertex_build,
+ )
+ from langflow.services.database.models.vertex_builds.model import VertexBuildBase
+
+ # Convert data to dict if it's a pydantic model
+ data_dict = data
+ if hasattr(data, "model_dump"):
+ data_dict = data.model_dump()
+ elif hasattr(data, "dict"):
+ data_dict = data.dict()
+
+ # Convert artifacts to dict if it's a pydantic model
+ artifacts_dict = artifacts
+ if artifacts is not None:
+ if hasattr(artifacts, "model_dump"):
+ artifacts_dict = artifacts.model_dump()
+ elif hasattr(artifacts, "dict"):
+ artifacts_dict = artifacts.dict()
+
+ vertex_build = VertexBuildBase(
+ flow_id=flow_id,
+ id=vertex_id,
+ valid=valid,
+ params=str(params) if params else None,
+ data=data_dict,
+ artifacts=artifacts_dict,
+ )
+
+ db_service = langflow_get_db_service()
+ if db_service is None:
+ return
+
+ async with db_service._with_session() as session: # noqa: SLF001
+ await crud_log_vertex_build(session, vertex_build)
+
+ except ImportError:
+ # Fallback for standalone lfx usage (without langflow)
+ settings_service = get_settings_service()
+ if not settings_service or not getattr(settings_service.settings, "vertex_builds_storage_enabled", False):
+ return
+
+ if isinstance(flow_id, str):
+ flow_id = UUID(flow_id)
+
+ # Log basic vertex build info - concrete implementation is in langflow
+ logger.debug(f"Vertex build logged: vertex={vertex_id}, flow={flow_id}, valid={valid}")
+
+ except Exception as exc: # noqa: BLE001
+ logger.warning(f"Error logging vertex build: {exc}")


  def rewrite_file_path(file_path: str):
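Note: log_transaction now delegates to a pluggable transaction service resolved via lfx.services.deps.get_transaction_service(); concrete persistence lives in langflow, and standalone lfx skips logging. The abstract interface (lfx/services/interfaces.py) is not included in this excerpt, so the class below is a hypothetical in-memory stand-in that only mirrors the call shape used above:

from typing import Any


class InMemoryTransactionService:
    # Hypothetical stand-in, not the real interface definition: it accepts the
    # same keyword arguments that log_transaction passes in the diff above.
    def __init__(self, enabled: bool = True) -> None:
        self.enabled = enabled
        self.records: list[dict[str, Any]] = []

    def is_enabled(self) -> bool:
        return self.enabled

    async def log_transaction(
        self,
        *,
        flow_id: str,
        vertex_id: str,
        inputs: dict | None = None,
        outputs: dict | None = None,
        status: str = "success",
        target_id: str | None = None,
        error: str | None = None,
    ) -> None:
        # Store the record in memory instead of persisting to a database.
        self.records.append(
            {
                "flow_id": flow_id,
                "vertex_id": vertex_id,
                "inputs": inputs,
                "outputs": outputs,
                "status": status,
                "target_id": target_id,
                "error": error,
            }
        )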
lfx/graph/vertex/base.py CHANGED
@@ -8,6 +8,9 @@ from collections.abc import AsyncIterator, Callable, Iterator, Mapping
  from enum import Enum
  from typing import TYPE_CHECKING, Any

+ from ag_ui.core import StepFinishedEvent, StepStartedEvent
+
+ from lfx.events.observability.lifecycle_events import observable
  from lfx.exceptions.component import ComponentBuildError
  from lfx.graph.schema import INPUT_COMPONENTS, OUTPUT_COMPONENTS, InterfaceComponentTypes, ResultData
  from lfx.graph.utils import UnbuiltObject, UnbuiltResult, log_transaction
@@ -105,7 +108,6 @@ class Vertex:
  self.use_result = False
  self.build_times: list[float] = []
  self.state = VertexStates.ACTIVE
- self.log_transaction_tasks: set[asyncio.Task] = set()
  self.output_names: list[str] = [
  output["name"] for output in self.outputs if isinstance(output, dict) and "name" in output
  ]
@@ -180,6 +182,7 @@ class Vertex:

  if isinstance(self.built_result, UnbuiltResult):
  return {}
+
  return self.built_result if isinstance(self.built_result, dict) else {"result": self.built_result}

  def set_artifacts(self) -> None:
@@ -381,6 +384,7 @@ class Vertex:
  vertex=self,
  )

+ @observable
  async def _build(
  self,
  fallback_to_env_vars,
@@ -390,7 +394,6 @@ class Vertex:
  """Initiate the build process."""
  await logger.adebug(f"Building {self.display_name}")
  await self._build_each_vertex_in_params_dict()
-
  if self.base_type is None:
  msg = f"Base type for vertex {self.display_name} not found"
  raise ValueError(msg)
@@ -531,11 +534,12 @@ class Vertex:
  self,
  flow_id: str | UUID,
  source: Vertex,
- status,
+ status: str,
  target: Vertex | None = None,
- error=None,
+ error: str | Exception | None = None,
+ outputs: dict[str, Any] | None = None,
  ) -> None:
- """Log a transaction asynchronously with proper task handling and cancellation.
+ """Log a transaction asynchronously.

  Args:
  flow_id: The ID of the flow
@@ -543,20 +547,16 @@ class Vertex:
  status: Transaction status
  target: Optional target vertex
  error: Optional error information
+ outputs: Optional explicit outputs dict (component execution results)
  """
- if self.log_transaction_tasks:
- # Safely await and remove completed tasks
- task = self.log_transaction_tasks.pop()
- await task
-
- # Create and track new task
- task = asyncio.create_task(log_transaction(flow_id, source, status, target, error))
- self.log_transaction_tasks.add(task)
- task.add_done_callback(self.log_transaction_tasks.discard)
+ try:
+ await log_transaction(flow_id, source, status, target, error, outputs)
+ except Exception as exc: # noqa: BLE001
+ logger.debug(f"Error logging transaction: {exc!s}")

  async def _get_result(
  self,
- requester: Vertex,
+ requester: Vertex, # noqa: ARG002
  target_handle_name: str | None = None, # noqa: ARG002
  ) -> Any:
  """Retrieves the result of the built component.
@@ -566,17 +566,11 @@ class Vertex:
  Returns:
  The built result if use_result is True, else the built object.
  """
- flow_id = self.graph.flow_id
  if not self.built:
- if flow_id:
- await self._log_transaction_async(str(flow_id), source=self, target=requester, status="error")
  msg = f"Component {self.display_name} has not been built yet"
  raise ValueError(msg)

- result = self.built_result if self.use_result else self.built_object
- if flow_id:
- await self._log_transaction_async(str(flow_id), source=self, target=requester, status="success")
- return result
+ return self.built_result if self.use_result else self.built_object

  async def _build_vertex_and_update_params(self, key, vertex: Vertex) -> None:
  """Builds a given vertex and updates the params dictionary accordingly."""
@@ -657,6 +651,12 @@ class Vertex:
  except Exception as exc:
  tb = traceback.format_exc()
  await logger.aexception(exc)
+ # Log transaction error
+ flow_id = self.graph.flow_id
+ if flow_id:
+ await self._log_transaction_async(
+ str(flow_id), source=self, target=None, status="error", error=str(exc)
+ )
  msg = f"Error building Component {self.display_name}: \n\n{exc}"
  raise ComponentBuildError(msg, tb) from exc

@@ -735,7 +735,10 @@ class Vertex:
  self.build_inactive()
  return None

- if self.frozen and self.built:
+ # Loop components should always run, even when frozen,
+ # because they need to iterate through their data
+ is_loop_component = self.display_name == "Loop" or self.is_loop
+ if self.frozen and self.built and not is_loop_component:
  return await self.get_requester_result(requester)
  if self.built and requester is not None:
  # This means that the vertex has already been built
@@ -769,6 +772,19 @@ class Vertex:

  self.finalize_build()

+ # Log transaction after successful build
+ flow_id = self.graph.flow_id
+ if flow_id:
+ # Extract outputs from outputs_logs for transaction logging
+ outputs_dict = None
+ if self.outputs_logs:
+ outputs_dict = {
+ k: v.model_dump() if hasattr(v, "model_dump") else v for k, v in self.outputs_logs.items()
+ }
+ await self._log_transaction_async(
+ str(flow_id), source=self, target=None, status="success", outputs=outputs_dict
+ )
+
  return await self.get_requester_result(requester)

  async def get_requester_result(self, requester: Vertex | None):
@@ -821,3 +837,39 @@ class Vertex:
  return
  # Apply the function to each output
  [func(output) for output in self.custom_component.get_outputs_map().values()]
+
+ # AGUI/AG UI Event Streaming Callbacks/Methods - (Optional, see Observable decorator)
+ def raw_event_metrics(self, optional_fields: dict | None) -> dict:
+ """This method is used to get the metrics of the vertex by the Observable decorator.
+
+ If the vertex has a get_metrics method, it will be called, and the metrics will be captured
+ to stream back to the user in an AGUI compliant format.
+ Additional fields/metrics to be captured can be modified in this method, or in the callback methods,
+ which are before_callback_event and after_callback_event before returning the AGUI event.
+ """
+ if optional_fields is None:
+ optional_fields = {}
+ import time
+
+ return {"timestamp": time.time(), **optional_fields}
+
+ def before_callback_event(self, *args, **kwargs) -> StepStartedEvent: # noqa: ARG002
+ """Should be a AGUI compatible event.
+
+ VERTEX class generates a StepStartedEvent event.
+ """
+ metrics = {}
+ if hasattr(self, "raw_event_metrics"):
+ metrics = self.raw_event_metrics({"component_id": self.id})
+
+ return StepStartedEvent(step_name=self.display_name, raw_event={"langflow": metrics})
+
+ def after_callback_event(self, result, *args, **kwargs) -> StepFinishedEvent: # noqa: ARG002
+ """Should be a AGUI compatible event.
+
+ VERTEX class generates a StepFinishedEvent event.
+ """
+ metrics = {}
+ if hasattr(self, "raw_event_metrics"):
+ metrics = self.raw_event_metrics({"component_id": self.id})
+ return StepFinishedEvent(step_name=self.display_name, raw_event={"langflow": metrics})
lfx/graph/vertex/vertex_types.py CHANGED
@@ -98,7 +98,6 @@ class ComponentVertex(Vertex):
  Returns:
  The built result if use_result is True, else the built object.
  """
- flow_id = self.graph.flow_id
  if not self.built:
  default_value: Any = UNDEFINED
  for edge in self.get_edge_with_target(requester.id):
@@ -109,8 +108,6 @@ class ComponentVertex(Vertex):
  else:
  default_value = requester.get_value_from_template_dict(edge.target_param)

- if flow_id:
- await self._log_transaction_async(source=self, target=requester, flow_id=str(flow_id), status="error")
  if default_value is not UNDEFINED:
  return default_value
  msg = f"Component {self.display_name} has not been built yet"
@@ -148,8 +145,6 @@ class ComponentVertex(Vertex):
  raise ValueError(msg)
  msg = f"Result not found for {edge.source_handle.name} in {edge}"
  raise ValueError(msg)
- if flow_id:
- await self._log_transaction_async(source=self, target=requester, flow_id=str(flow_id), status="success")
  return result

  def extract_messages_from_artifacts(self, artifacts: dict[str, Any]) -> list[dict]:
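Note: Graph and Vertex now expose before_callback_event / after_callback_event hooks that build AG-UI events (RunStartedEvent/RunFinishedEvent and StepStartedEvent/StepFinishedEvent), and their _run/_build methods are wrapped with @observable. The decorator itself is defined in the new lfx/events/observability/lifecycle_events.py, which is not shown in this diff; the sketch below is only one plausible shape for such a decorator, with event dispatch deliberately omitted:

import functools


def observable(func):
    # Hypothetical sketch: wrap an async method, calling the optional
    # before/after hooks around it. The real decorator presumably also
    # dispatches the returned AG-UI events to an event stream, which is
    # intentionally left out here.
    @functools.wraps(func)
    async def wrapper(self, *args, **kwargs):
        if hasattr(self, "before_callback_event"):
            started = self.before_callback_event(*args, **kwargs)  # e.g. StepStartedEvent
            _ = started
        result = await func(self, *args, **kwargs)
        if hasattr(self, "after_callback_event"):
            finished = self.after_callback_event(result, *args, **kwargs)  # e.g. StepFinishedEvent
            _ = finished
        return result

    return wrapper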
lfx/inputs/__init__.py CHANGED
@@ -17,6 +17,7 @@ from .inputs import (
  McpInput,
  MessageInput,
  MessageTextInput,
+ ModelInput,
  MultilineInput,
  MultilineSecretInput,
  MultiselectInput,
@@ -52,6 +53,7 @@ __all__ = [
  "McpInput",
  "MessageInput",
  "MessageTextInput",
+ "ModelInput",
  "MultilineInput",
  "MultilineSecretInput",
  "MultiselectInput",
lfx/inputs/input_mixin.py CHANGED
@@ -28,6 +28,7 @@ class FieldTypes(str, Enum):
  AUTH = "auth"
  FILE = "file"
  PROMPT = "prompt"
+ MUSTACHE_PROMPT = "mustache"
  CODE = "code"
  OTHER = "other"
  TABLE = "table"
@@ -37,6 +38,7 @@ class FieldTypes(str, Enum):
  QUERY = "query"
  TOOLS = "tools"
  MCP = "mcp"
+ MODEL = "model"


  SerializableFieldTypes = Annotated[FieldTypes, PlainSerializer(lambda v: v.value, return_type=str)]
@@ -136,6 +138,59 @@ class BaseInputMixin(CrossModuleModel, validate_assignment=True): # type: ignor
  return dump


+ class ModelInputMixin(BaseModel):
+ model_config = ConfigDict(populate_by_name=True)
+ """Mixin for model input fields."""
+ model_name: str | None = None
+ """Name of the model to be used in the input."""
+ model_type: str | None = "language"
+ """Type of model: 'language' or 'embedding'. Defaults to 'language'."""
+ model_options: list[dict[str, Any]] | None = Field(
+ default=None,
+ validation_alias="options",
+ serialization_alias="options",
+ )
+ """List of model options with name, icon, category, provider, and metadata."""
+ temperature: float | None = None
+ """Temperature parameter for model generation."""
+ max_tokens: int | None = None
+ """Maximum tokens for model generation."""
+ limit: int | None = None
+ """Limit for the number of options to display."""
+ external_options: dict[str, Any] | None = None
+ """Dictionary of external options to display below the dropdown options (e.g., 'Connect other models')."""
+
+ @field_validator("model_options", mode="before")
+ @classmethod
+ def normalize_model_options(cls, v):
+ """Convert simple list of model names to list of dicts format.
+
+ Allows passing ['gpt-4o', 'gpt-4o-mini'] which gets converted to:
+ [{'name': 'gpt-4o', ...}, {'name': 'gpt-4o-mini', ...}]
+ """
+ if v is None or not isinstance(v, list):
+ return v
+
+ # If already in dict format, return as-is
+ if all(isinstance(item, dict) for item in v):
+ return v
+
+ # If it's a list of strings, convert to dict format
+ if all(isinstance(item, str) for item in v):
+ # Avoid circular import by importing the module directly (not through package __init__)
+ try:
+ from lfx.base.models.unified_models import normalize_model_names_to_dicts
+
+ return normalize_model_names_to_dicts(v)
+ except Exception: # noqa: BLE001
+ # Fallback if import or normalization fails
+ # This can happen during module initialization or in test environments
+ return [{"name": item} for item in v]
+
+ # Mixed list or unexpected format, return as-is
+ return v
+
+
  class ToolModeMixin(BaseModel):
  tool_mode: bool = False
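Note: the normalize_model_options validator above accepts either a list of option dicts or a plain list of model-name strings. The primary path delegates to normalize_model_names_to_dicts in lfx/base/models/unified_models.py (not shown here), which can attach icon/provider/metadata fields; the fallback path simply wraps each name. A standalone replay of that fallback rule:

def normalize(options):
    # Mirrors only the validator's fallback behavior; the metadata enrichment
    # done by unified_models.normalize_model_names_to_dicts is not reproduced here.
    if options is None or not isinstance(options, list):
        return options
    if all(isinstance(item, dict) for item in options):
        return options  # already in dict form
    if all(isinstance(item, str) for item in options):
        return [{"name": item} for item in options]
    return options  # mixed or unexpected format, leave untouched


print(normalize(["gpt-4o", "gpt-4o-mini"]))
# -> [{'name': 'gpt-4o'}, {'name': 'gpt-4o-mini'}]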