lfx-nightly 0.2.1.dev7__py3-none-any.whl → 0.3.0.dev3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. lfx/__main__.py +137 -6
  2. lfx/_assets/component_index.json +1 -1
  3. lfx/base/agents/agent.py +9 -5
  4. lfx/base/agents/altk_base_agent.py +5 -3
  5. lfx/base/agents/events.py +1 -1
  6. lfx/base/models/unified_models.py +1 -1
  7. lfx/base/models/watsonx_constants.py +10 -7
  8. lfx/base/prompts/api_utils.py +40 -5
  9. lfx/cli/__init__.py +10 -2
  10. lfx/cli/script_loader.py +5 -4
  11. lfx/cli/validation.py +6 -3
  12. lfx/components/datastax/astradb_assistant_manager.py +4 -2
  13. lfx/components/docling/docling_remote.py +1 -0
  14. lfx/components/langchain_utilities/ibm_granite_handler.py +211 -0
  15. lfx/components/langchain_utilities/tool_calling.py +24 -1
  16. lfx/components/llm_operations/lambda_filter.py +182 -97
  17. lfx/components/models_and_agents/mcp_component.py +38 -1
  18. lfx/components/models_and_agents/prompt.py +105 -18
  19. lfx/components/ollama/ollama_embeddings.py +109 -28
  20. lfx/components/processing/text_operations.py +580 -0
  21. lfx/custom/custom_component/component.py +65 -10
  22. lfx/events/observability/__init__.py +0 -0
  23. lfx/events/observability/lifecycle_events.py +111 -0
  24. lfx/field_typing/__init__.py +57 -58
  25. lfx/graph/graph/base.py +36 -0
  26. lfx/graph/utils.py +45 -12
  27. lfx/graph/vertex/base.py +71 -22
  28. lfx/graph/vertex/vertex_types.py +0 -5
  29. lfx/inputs/input_mixin.py +1 -0
  30. lfx/inputs/inputs.py +5 -0
  31. lfx/interface/components.py +24 -7
  32. lfx/run/base.py +47 -77
  33. lfx/schema/__init__.py +50 -0
  34. lfx/schema/message.py +85 -8
  35. lfx/schema/workflow.py +171 -0
  36. lfx/services/deps.py +12 -0
  37. lfx/services/interfaces.py +43 -1
  38. lfx/services/schema.py +1 -0
  39. lfx/services/settings/auth.py +95 -4
  40. lfx/services/settings/base.py +4 -0
  41. lfx/services/settings/utils.py +82 -0
  42. lfx/services/transaction/__init__.py +5 -0
  43. lfx/services/transaction/service.py +35 -0
  44. lfx/tests/unit/components/__init__.py +0 -0
  45. lfx/utils/constants.py +1 -0
  46. lfx/utils/mustache_security.py +79 -0
  47. lfx/utils/validate_cloud.py +67 -0
  48. {lfx_nightly-0.2.1.dev7.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/METADATA +3 -1
  49. {lfx_nightly-0.2.1.dev7.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/RECORD +51 -42
  50. {lfx_nightly-0.2.1.dev7.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/WHEEL +0 -0
  51. {lfx_nightly-0.2.1.dev7.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/entry_points.txt +0 -0
lfx/custom/custom_component/component.py CHANGED
@@ -1565,7 +1565,24 @@ class Component(CustomComponent):
         return has_chat_input(self.graph.get_vertex_neighbors(self._vertex))
 
     def _should_skip_message(self, message: Message) -> bool:
-        """Check if the message should be skipped based on vertex configuration and message type."""
+        """Check if the message should be skipped based on vertex configuration and message type.
+
+        When a message is skipped:
+        - It is NOT stored in the database
+        - It will NOT have an ID (message.get_id() will return None)
+        - It is still returned to the caller, but no events are sent to the frontend
+
+        Messages are skipped when:
+        - The component is not an input or output vertex
+        - The component is not connected to a Chat Output
+        - The message is not an ErrorMessage
+
+        This prevents intermediate components from cluttering the database with messages
+        that aren't meant to be displayed in the chat UI.
+
+        Returns:
+            bool: True if the message should be skipped, False otherwise
+        """
         return (
             self._vertex is not None
             and not (self._vertex.is_output or self._vertex.is_input)
@@ -1603,12 +1620,31 @@ class Component(CustomComponent):
     async def send_message(self, message: Message, id_: str | None = None, *, skip_db_update: bool = False):
         """Send a message with optional database update control.
 
+        This is the central method for sending messages in Langflow. It handles:
+        - Message storage in the database (unless skipped)
+        - Event emission to the frontend
+        - Streaming support
+        - Error handling and cleanup
+
+        Message ID Rules:
+        - Messages only have an ID after being stored in the database
+        - If _should_skip_message() returns True, the message is not stored and will not have an ID
+        - Always use message.get_id() or message.has_id() to safely check for ID existence
+        - Never access message.id directly without checking if it exists first
+
         Args:
             message: The message to send
-            id_: Optional message ID
+            id_: Optional message ID (used for event emission, not database storage)
             skip_db_update: If True, only update in-memory and send event, skip DB write.
                 Useful during streaming to avoid excessive DB round-trips.
-                Note: This assumes the message already exists in the database with message.id set.
+                Note: When skip_db_update=True, the message must already have an ID
+                (i.e., it must have been stored previously).
+
+        Returns:
+            Message: The stored message (with ID if stored in database, without ID if skipped)
+
+        Raises:
+            ValueError: If skip_db_update=True but message doesn't have an ID
         """
         if self._should_skip_message(message):
             return message
@@ -1621,10 +1657,18 @@ class Component(CustomComponent):
 
         # If skip_db_update is True and message already has an ID, skip the DB write
         # This path is used during agent streaming to avoid excessive DB round-trips
-        if skip_db_update and message.id:
+        # When skip_db_update=True, we require the message to already have an ID
+        # because we're updating an existing message, not creating a new one
+        if skip_db_update:
+            if not message.has_id():
+                msg = (
+                    "skip_db_update=True requires the message to already have an ID. "
+                    "The message must have been stored in the database previously."
+                )
+                raise ValueError(msg)
             # Create a fresh Message instance for consistency with normal flow
             stored_message = await Message.create(**message.model_dump())
-            self._stored_message_id = stored_message.id
+            self._stored_message_id = stored_message.get_id()
             # Still send the event to update the client in real-time
             # Note: If this fails, we don't need DB cleanup since we didn't write to DB
             await self._send_message_event(stored_message, id_=id_)
@@ -1632,7 +1676,9 @@ class Component(CustomComponent):
         # Normal flow: store/update in database
         stored_message = await self._store_message(message)
 
-        self._stored_message_id = stored_message.id
+        # After _store_message, the message should always have an ID
+        # but we use get_id() for safety
+        self._stored_message_id = stored_message.get_id()
         try:
             complete_message = ""
             if (
@@ -1653,7 +1699,10 @@ class Component(CustomComponent):
                 await self._send_message_event(stored_message, id_=id_)
         except Exception:
             # remove the message from the database
-            await delete_message(stored_message.id)
+            # Only delete if the message has an ID
+            message_id = stored_message.get_id()
+            if message_id:
+                await delete_message(id_=message_id)
             raise
         self.status = stored_message
         return stored_message
@@ -1699,7 +1748,7 @@ class Component(CustomComponent):
         return bool(
             hasattr(self, "_event_manager")
             and self._event_manager
-            and stored_message.id
+            and stored_message.has_id()
             and not isinstance(original_message.text, str)
         )
 
@@ -1726,14 +1775,20 @@ class Component(CustomComponent):
             msg = "The message must be an iterator or an async iterator."
             raise TypeError(msg)
 
+        # Get message ID safely - streaming requires an ID
+        message_id = message.get_id()
+        if not message_id:
+            msg = "Message must have an ID to stream. Messages only have IDs after being stored in the database."
+            raise ValueError(msg)
+
         if isinstance(iterator, AsyncIterator):
-            return await self._handle_async_iterator(iterator, message.id, message)
+            return await self._handle_async_iterator(iterator, message_id, message)
         try:
            complete_message = ""
            first_chunk = True
            for chunk in iterator:
                complete_message = await self._process_chunk(
-                    chunk.content, complete_message, message.id, message, first_chunk=first_chunk
+                    chunk.content, complete_message, message_id, message, first_chunk=first_chunk
                )
                first_chunk = False
        except Exception as e:
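
To make the Message ID rules above concrete, here is a minimal caller-side sketch. It is not part of the diff: `send_and_check` and its arguments are hypothetical, while send_message, has_id(), get_id(), and skip_db_update come from the code shown above.

    # Sketch only: assumes `component` is a Component wired into a running graph
    # and `message` is an lfx Message instance; neither is constructed here.
    async def send_and_check(component, message):
        stored = await component.send_message(message)
        if stored.has_id():
            # Persisted: the ID is safe to use, and later streaming-style updates
            # may skip the DB write because an ID already exists.
            await component.send_message(stored, skip_db_update=True)
            return stored.get_id()
        # Skipped: the message is returned but was never stored, so there is
        # no ID and no events were sent to the frontend.
        return None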
lfx/events/observability/__init__.py (File without changes)
lfx/events/observability/lifecycle_events.py ADDED
@@ -0,0 +1,111 @@
+import functools
+from collections.abc import Awaitable, Callable
+from typing import Any
+
+from ag_ui.encoder.encoder import EventEncoder
+
+from lfx.log.logger import logger
+
+AsyncMethod = Callable[..., Awaitable[Any]]
+
+encoder: EventEncoder = EventEncoder()
+
+
+def observable(observed_method: AsyncMethod) -> AsyncMethod:
+    """Decorator to make an async method observable by emitting lifecycle events.
+
+    Decorated classes are expected to implement specific methods to emit AGUI events:
+    - `before_callback_event(*args, **kwargs)`: Called before the decorated method executes.
+      It should return a dictionary representing the event payload.
+    - `after_callback_event(result, *args, **kwargs)`: Called after the decorated method
+      successfully completes. It should return a dictionary representing the event payload.
+      The `result` of the decorated method is passed as the first argument.
+    - `error_callback_event(exception, *args, **kwargs)`: (Optional) Called if the decorated
+      method raises an exception. It should return a dictionary representing the error event payload.
+      The `exception` is passed as the first argument.
+
+    If these methods are implemented, the decorator will call them to generate event payloads.
+    If an implementation is missing, the corresponding event publishing will be skipped without error.
+
+    Payloads returned by these methods can include custom metrics by placing them
+    under the 'langflow' key within the 'raw_events' dictionary.
+
+    Example:
+        class MyClass:
+            display_name = "My Observable Class"
+
+            def before_callback_event(self, *args, **kwargs):
+                return {"event_name": "my_method_started", "data": {"input_args": args}}
+
+            async def my_method(self, event_manager: EventManager, data: str):
+                # ... method logic ...
+                return "processed_data"
+
+            def after_callback_event(self, result, *args, **kwargs):
+                return {"event_name": "my_method_completed", "data": {"output": result}}
+
+            def error_callback_event(self, exception, *args, **kwargs):
+                return {"event_name": "my_method_failed", "error": str(exception)}
+
+            @observable
+            async def my_observable_method(self, event_manager: EventManager, data: str):
+                # ... method logic ...
+                pass
+    """
+
+    async def check_event_manager(self, **kwargs):
+        if "event_manager" not in kwargs or kwargs["event_manager"] is None:
+            await logger.awarning(
+                f"EventManager not available/provided, skipping observable event publishing "
+                f"from {self.__class__.__name__}"
+            )
+            return False
+        return True
+
+    async def before_callback(self, *args, **kwargs):
+        if not await check_event_manager(self, **kwargs):
+            return
+
+        if hasattr(self, "before_callback_event"):
+            event_payload = self.before_callback_event(*args, **kwargs)
+            event_payload = encoder.encode(event_payload)
+            # TODO: Publish event per request, would required context based queues
+        else:
+            await logger.awarning(
+                f"before_callback_event not implemented for {self.__class__.__name__}. Skipping event publishing."
+            )
+
+    async def after_callback(self, res: Any | None = None, *args, **kwargs):
+        if not await check_event_manager(self, **kwargs):
+            return
+        if hasattr(self, "after_callback_event"):
+            event_payload = self.after_callback_event(res, *args, **kwargs)
+            event_payload = encoder.encode(event_payload)
+            # TODO: Publish event per request, would required context based queues
+        else:
+            await logger.awarning(
+                f"after_callback_event not implemented for {self.__class__.__name__}. Skipping event publishing."
+            )
+
+    @functools.wraps(observed_method)
+    async def wrapper(self, *args, **kwargs):
+        await before_callback(self, *args, **kwargs)
+        result = None
+        try:
+            result = await observed_method(self, *args, **kwargs)
+            await after_callback(self, result, *args, **kwargs)
+        except Exception as e:
+            await logger.aerror(f"Exception in {self.__class__.__name__}: {e}")
+            if hasattr(self, "error_callback_event"):
+                try:
+                    event_payload = self.error_callback_event(e, *args, **kwargs)
+                    event_payload = encoder.encode(event_payload)
+                    # TODO: Publish event per request, would required context based queues
+                except Exception as callback_e:  # noqa: BLE001
+                    await logger.aerror(
+                        f"Exception during error_callback_event for {self.__class__.__name__}: {callback_e}"
+                    )
+            raise
+        return result
+
+    return wrapper
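
As a usage sketch of the new decorator: the `Job` class, its `run` method, and `build_event_manager` below are made up for illustration; the callback-method names, the `event_manager` keyword check, and the dict payloads mirror the module shown above (its own docstring example also uses dicts).

    from lfx.events.observability.lifecycle_events import observable

    class Job:
        def before_callback_event(self, *args, **kwargs):
            # Payload shape follows the module docstring's own example.
            return {"event_name": "job_started", "data": {"input_args": args}}

        def after_callback_event(self, result, *args, **kwargs):
            return {"event_name": "job_finished", "data": {"output": result}}

        @observable
        async def run(self, *, event_manager=None, data: str = ""):
            # The wrapper publishes only when an `event_manager` kwarg is passed;
            # otherwise it logs a warning and simply runs the method.
            return data.upper()

    # await Job().run(event_manager=build_event_manager(), data="hello")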
lfx/field_typing/__init__.py CHANGED
@@ -1,63 +1,6 @@
 from typing import Any
 
-from .constants import (
-    AgentExecutor,
-    BaseChatMemory,
-    BaseChatModel,
-    BaseDocumentCompressor,
-    BaseLanguageModel,
-    BaseLLM,
-    BaseLoader,
-    BaseMemory,
-    BaseOutputParser,
-    BasePromptTemplate,
-    BaseRetriever,
-    Callable,
-    Chain,
-    ChatPromptTemplate,
-    Code,
-    Data,
-    Document,
-    Embeddings,
-    LanguageModel,
-    NestedDict,
-    Object,
-    PromptTemplate,
-    Retriever,
-    Text,
-    TextSplitter,
-    Tool,
-    VectorStore,
-)
-from .range_spec import RangeSpec
-
-
-def _import_input_class():
-    from lfx.template.field.base import Input
-
-    return Input
-
-
-def _import_output_class():
-    from lfx.template.field.base import Output
-
-    return Output
-
-
-def __getattr__(name: str) -> Any:
-    # This is to avoid circular imports
-    if name == "Input":
-        return _import_input_class()
-    if name == "Output":
-        return _import_output_class()
-    if name == "RangeSpec":
-        return RangeSpec
-    # The other names should work as if they were imported from constants
-    # Import the constants module langflow.field_typing.constants
-    from . import constants
-
-    return getattr(constants, name)
-
+# Lazy imports - nothing imported at module level except __all__
 
 __all__ = [
     "AgentExecutor",
@@ -78,9 +21,11 @@ __all__ = [
     "Data",
     "Document",
     "Embeddings",
+    "Input",
     "LanguageModel",
     "NestedDict",
     "Object",
+    "Output",
     "PromptTemplate",
     "RangeSpec",
     "Retriever",
@@ -89,3 +34,57 @@
     "Tool",
     "VectorStore",
 ]
+
+# Names that come from constants module
+_CONSTANTS_NAMES = {
+    "AgentExecutor",
+    "BaseChatMemory",
+    "BaseChatModel",
+    "BaseDocumentCompressor",
+    "BaseLLM",
+    "BaseLanguageModel",
+    "BaseLoader",
+    "BaseMemory",
+    "BaseOutputParser",
+    "BasePromptTemplate",
+    "BaseRetriever",
+    "Callable",
+    "Chain",
+    "ChatPromptTemplate",
+    "Code",
+    "Data",
+    "Document",
+    "Embeddings",
+    "LanguageModel",
+    "NestedDict",
+    "Object",
+    "PromptTemplate",
+    "Retriever",
+    "Text",
+    "TextSplitter",
+    "Tool",
+    "VectorStore",
+}
+
+
+def __getattr__(name: str) -> Any:
+    """Lazy import for all field typing constants."""
+    if name == "Input":
+        from lfx.template.field.base import Input
+
+        return Input
+    if name == "Output":
+        from lfx.template.field.base import Output
+
+        return Output
+    if name == "RangeSpec":
+        from .range_spec import RangeSpec
+
+        return RangeSpec
+    if name in _CONSTANTS_NAMES:
+        from . import constants
+
+        return getattr(constants, name)
+
+    msg = f"module {__name__!r} has no attribute {name!r}"
+    raise AttributeError(msg)
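
For context, a small sketch of how this lazy pattern behaves from a consumer's point of view (assumes the lfx package is importable; `NotAType` is deliberately bogus):

    # Importing the package no longer imports the heavy constants module;
    # the first attribute access triggers it via module-level __getattr__ (PEP 562).
    import importlib

    field_typing = importlib.import_module("lfx.field_typing")
    lm_type = field_typing.LanguageModel  # lazily runs `from . import constants`
    # Unknown names now raise AttributeError instead of silently deferring:
    missing = getattr(field_typing, "NotAType", None)  # -> None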
lfx/graph/graph/base.py CHANGED
@@ -15,6 +15,9 @@ from functools import partial
 from itertools import chain
 from typing import TYPE_CHECKING, Any, cast
 
+from ag_ui.core import RunFinishedEvent, RunStartedEvent
+
+from lfx.events.observability.lifecycle_events import observable
 from lfx.exceptions.component import ComponentBuildError
 from lfx.graph.edge.base import CycleEdge, Edge
 from lfx.graph.graph.constants import Finish, lazy_load_vertex_dict
@@ -728,6 +731,7 @@ class Graph:
             raise ValueError(msg)
         vertex.update_raw_params(inputs, overwrite=True)
 
+    @observable
     async def _run(
         self,
         *,
@@ -1549,8 +1553,10 @@ class Graph:
                 vertex.result.used_frozen_result = True
             except Exception:  # noqa: BLE001
                 logger.debug("Error finalizing build", exc_info=True)
+                vertex.built = False
                 should_build = True
         except KeyError:
+            vertex.built = False
             should_build = True
 
         if should_build:
@@ -2132,6 +2138,17 @@ class Graph:
         """Get all vertex IDs in the graph."""
         return [vertex.id for vertex in self.vertices]
 
+    def get_terminal_nodes(self) -> list[str]:
+        """Returns vertex IDs that are terminal nodes (not source of any edge).
+
+        Terminal nodes are vertices that have no outgoing edges - they are not
+        listed as source_id in any of the graph's edges.
+
+        Returns:
+            list[str]: List of vertex IDs that are terminal nodes.
+        """
+        return [vertex.id for vertex in self.vertices if not self.successor_map.get(vertex.id, [])]
+
     def sort_vertices(
         self,
         stop_component_id: str | None = None,
@@ -2296,3 +2313,22 @@ class Graph:
             predecessors = [i.id for i in self.get_predecessors(vertex)]
             result |= {vertex_id: {"successors": sucessors, "predecessors": predecessors}}
         return result
+
+    def raw_event_metrics(self, optional_fields: dict | None = None) -> dict:
+        if optional_fields is None:
+            optional_fields = {}
+        import time
+
+        return {"timestamp": time.time(), **optional_fields}
+
+    def before_callback_event(self, *args, **kwargs) -> RunStartedEvent:  # noqa: ARG002
+        metrics = {}
+        if hasattr(self, "raw_event_metrics"):
+            metrics = self.raw_event_metrics({"total_components": len(self.vertices)})
+        return RunStartedEvent(run_id=self._run_id, thread_id=self.flow_id, raw_event=metrics)
+
+    def after_callback_event(self, result: Any = None, *args, **kwargs) -> RunFinishedEvent:  # noqa: ARG002
+        metrics = {}
+        if hasattr(self, "raw_event_metrics"):
+            metrics = self.raw_event_metrics({"total_components": len(self.vertices)})
+        return RunFinishedEvent(run_id=self._run_id, thread_id=self.flow_id, result=None, raw_event=metrics)
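
A standalone sketch of the terminal-node rule used by get_terminal_nodes() above, using plain dicts instead of the Graph class (the vertex IDs are invented):

    # successor_map maps vertex id -> list of successor ids, the structure the
    # method consults; a vertex is terminal when it has no successors.
    successor_map = {"ChatInput-1": ["Prompt-2"], "Prompt-2": ["ChatOutput-3"]}
    vertices = ["ChatInput-1", "Prompt-2", "ChatOutput-3"]
    terminal = [v for v in vertices if not successor_map.get(v, [])]
    assert terminal == ["ChatOutput-3"]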
lfx/graph/utils.py CHANGED
@@ -11,7 +11,7 @@ from lfx.schema.data import Data
 from lfx.schema.message import Message
 
 # Database imports removed - lfx should be lightweight
-from lfx.services.deps import get_db_service, get_settings_service
+from lfx.services.deps import get_settings_service
 
 if TYPE_CHECKING:
     from lfx.graph.vertex.base import Vertex
@@ -108,32 +108,65 @@ def _vertex_to_primitive_dict(target: Vertex) -> dict:
 async def log_transaction(
     flow_id: str | UUID,
     source: Vertex,
-    status,
-    target: Vertex | None = None,  # noqa: ARG001
-    error=None,  # noqa: ARG001
+    status: str,
+    target: Vertex | None = None,
+    error: str | Exception | None = None,
+    outputs: dict[str, Any] | None = None,
 ) -> None:
     """Asynchronously logs a transaction record for a vertex in a flow if transaction storage is enabled.
 
-    This is a lightweight implementation that only logs if database service is available.
+    Uses the pluggable TransactionService to log transactions. When running within langflow,
+    the concrete TransactionService implementation persists to the database.
+    When running standalone (lfx only), transactions are not persisted.
+
+    Args:
+        flow_id: The flow ID
+        source: The source vertex (component being executed)
+        status: Transaction status (success/error)
+        target: Optional target vertex (for data transfer logging)
+        error: Optional error information
+        outputs: Optional explicit outputs dict (component execution results)
     """
     try:
-        settings_service = get_settings_service()
-        if not settings_service or not getattr(settings_service.settings, "transactions_storage_enabled", False):
+        # Guard against null source
+        if source is None:
             return
 
-        db_service = get_db_service()
-        if db_service is None:
-            logger.debug("Database service not available, skipping transaction logging")
+        # Get the transaction service via dependency injection
+        from lfx.services.deps import get_transaction_service
+
+        transaction_service = get_transaction_service()
+
+        # If no transaction service is available or it's disabled, skip logging
+        if transaction_service is None or not transaction_service.is_enabled():
             return
 
+        # Resolve flow_id
        if not flow_id:
            if source.graph.flow_id:
                flow_id = source.graph.flow_id
            else:
                return
 
-        # Log basic transaction info - concrete implementation should be in langflow
-        logger.debug(f"Transaction logged: vertex={source.id}, flow={flow_id}, status={status}")
+        # Convert UUID to string for the service interface
+        flow_id_str = str(flow_id) if isinstance(flow_id, UUID) else flow_id
+
+        # Prepare inputs and outputs
+        inputs = _vertex_to_primitive_dict(source) if source else None
+        target_outputs = _vertex_to_primitive_dict(target) if target else None
+        transaction_outputs = outputs if outputs is not None else target_outputs
+
+        # Log transaction via the service
+        await transaction_service.log_transaction(
+            flow_id=flow_id_str,
+            vertex_id=source.id,
+            inputs=inputs,
+            outputs=transaction_outputs,
+            status=status,
+            target_id=target.id if target else None,
+            error=str(error) if error else None,
+        )
+
     except Exception as exc:  # noqa: BLE001
         logger.debug(f"Error logging transaction: {exc!s}")
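
The docstring above describes a pluggable TransactionService. A minimal in-memory stand-in that satisfies the two calls made here (is_enabled and log_transaction with the keyword arguments shown) might look like the sketch below; the class name and storage are illustrative, and the real interface lives in lfx/services/interfaces.py.

    # Hypothetical stand-in, used only to illustrate the call contract above.
    class InMemoryTransactionService:
        def __init__(self, enabled: bool = True):
            self.enabled = enabled
            self.records: list[dict] = []

        def is_enabled(self) -> bool:
            return self.enabled

        async def log_transaction(
            self, *, flow_id, vertex_id, inputs=None, outputs=None,
            status="success", target_id=None, error=None,
        ):
            # Same keyword arguments that log_transaction() passes in lfx/graph/utils.py.
            self.records.append({
                "flow_id": flow_id, "vertex_id": vertex_id, "inputs": inputs,
                "outputs": outputs, "status": status, "target_id": target_id,
                "error": error,
            })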