lfx-nightly 0.1.12.dev14__py3-none-any.whl → 0.1.12.dev15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of lfx-nightly is flagged as potentially problematic.

Files changed (127)
  1. lfx/base/agents/events.py +40 -29
  2. lfx/base/constants.py +1 -1
  3. lfx/base/data/docling_utils.py +43 -8
  4. lfx/base/data/utils.py +3 -3
  5. lfx/base/knowledge_bases/__init__.py +3 -0
  6. lfx/base/knowledge_bases/knowledge_base_utils.py +137 -0
  7. lfx/base/models/anthropic_constants.py +3 -1
  8. lfx/base/models/model_input_constants.py +1 -1
  9. lfx/base/vectorstores/vector_store_connection_decorator.py +1 -1
  10. lfx/components/agentql/agentql_api.py +1 -1
  11. lfx/components/agents/agent.py +62 -17
  12. lfx/components/agents/mcp_component.py +11 -1
  13. lfx/components/aiml/aiml.py +4 -1
  14. lfx/components/amazon/amazon_bedrock_converse.py +196 -0
  15. lfx/components/amazon/amazon_bedrock_model.py +5 -1
  16. lfx/components/azure/azure_openai.py +1 -1
  17. lfx/components/azure/azure_openai_embeddings.py +1 -1
  18. lfx/components/clickhouse/clickhouse.py +1 -1
  19. lfx/components/confluence/confluence.py +1 -1
  20. lfx/components/crewai/crewai.py +1 -0
  21. lfx/components/crewai/hierarchical_crew.py +1 -0
  22. lfx/components/crewai/hierarchical_task.py +1 -0
  23. lfx/components/crewai/sequential_crew.py +1 -0
  24. lfx/components/crewai/sequential_task.py +1 -0
  25. lfx/components/crewai/sequential_task_agent.py +1 -0
  26. lfx/components/data/api_request.py +13 -3
  27. lfx/components/data/csv_to_data.py +1 -0
  28. lfx/components/data/file.py +71 -25
  29. lfx/components/data/json_to_data.py +1 -0
  30. lfx/components/datastax/astra_db.py +2 -1
  31. lfx/components/datastax/astra_vectorize.py +3 -5
  32. lfx/components/datastax/astradb_tool.py +5 -1
  33. lfx/components/datastax/astradb_vectorstore.py +8 -1
  34. lfx/components/deactivated/chat_litellm_model.py +1 -1
  35. lfx/components/deactivated/metal.py +1 -1
  36. lfx/components/docling/docling_inline.py +23 -9
  37. lfx/components/elastic/elasticsearch.py +1 -1
  38. lfx/components/elastic/opensearch.py +1 -1
  39. lfx/components/embeddings/similarity.py +1 -0
  40. lfx/components/embeddings/text_embedder.py +1 -0
  41. lfx/components/firecrawl/firecrawl_crawl_api.py +1 -1
  42. lfx/components/firecrawl/firecrawl_extract_api.py +1 -1
  43. lfx/components/firecrawl/firecrawl_map_api.py +1 -1
  44. lfx/components/firecrawl/firecrawl_scrape_api.py +1 -1
  45. lfx/components/google/gmail.py +1 -0
  46. lfx/components/google/google_generative_ai_embeddings.py +1 -1
  47. lfx/components/helpers/memory.py +8 -6
  48. lfx/components/helpers/output_parser.py +1 -0
  49. lfx/components/helpers/store_message.py +1 -0
  50. lfx/components/huggingface/huggingface.py +3 -1
  51. lfx/components/huggingface/huggingface_inference_api.py +1 -1
  52. lfx/components/ibm/watsonx.py +1 -1
  53. lfx/components/ibm/watsonx_embeddings.py +1 -1
  54. lfx/components/icosacomputing/combinatorial_reasoner.py +1 -1
  55. lfx/components/input_output/chat.py +0 -27
  56. lfx/components/input_output/chat_output.py +3 -27
  57. lfx/components/knowledge_bases/__init__.py +34 -0
  58. lfx/components/knowledge_bases/ingestion.py +686 -0
  59. lfx/components/knowledge_bases/retrieval.py +256 -0
  60. lfx/components/langchain_utilities/langchain_hub.py +1 -1
  61. lfx/components/langwatch/langwatch.py +1 -1
  62. lfx/components/logic/conditional_router.py +40 -3
  63. lfx/components/logic/data_conditional_router.py +1 -0
  64. lfx/components/logic/flow_tool.py +2 -1
  65. lfx/components/logic/pass_message.py +1 -0
  66. lfx/components/logic/sub_flow.py +2 -1
  67. lfx/components/milvus/milvus.py +1 -1
  68. lfx/components/olivya/olivya.py +1 -1
  69. lfx/components/processing/alter_metadata.py +1 -0
  70. lfx/components/processing/combine_text.py +1 -0
  71. lfx/components/processing/create_data.py +1 -0
  72. lfx/components/processing/data_to_dataframe.py +1 -0
  73. lfx/components/processing/extract_key.py +1 -0
  74. lfx/components/processing/filter_data.py +1 -0
  75. lfx/components/processing/filter_data_values.py +1 -0
  76. lfx/components/processing/json_cleaner.py +1 -0
  77. lfx/components/processing/merge_data.py +1 -0
  78. lfx/components/processing/message_to_data.py +1 -0
  79. lfx/components/processing/parse_data.py +1 -0
  80. lfx/components/processing/parse_dataframe.py +1 -0
  81. lfx/components/processing/parse_json_data.py +1 -0
  82. lfx/components/processing/regex.py +1 -0
  83. lfx/components/processing/select_data.py +1 -0
  84. lfx/components/processing/structured_output.py +7 -3
  85. lfx/components/processing/update_data.py +1 -0
  86. lfx/components/prototypes/__init__.py +8 -7
  87. lfx/components/qdrant/qdrant.py +1 -1
  88. lfx/components/redis/redis_chat.py +1 -1
  89. lfx/components/tools/__init__.py +0 -6
  90. lfx/components/tools/calculator.py +2 -1
  91. lfx/components/tools/python_code_structured_tool.py +1 -0
  92. lfx/components/tools/python_repl.py +2 -1
  93. lfx/components/tools/search_api.py +2 -1
  94. lfx/components/tools/serp_api.py +2 -1
  95. lfx/components/tools/tavily_search_tool.py +1 -0
  96. lfx/components/tools/wikidata_api.py +2 -1
  97. lfx/components/tools/wikipedia_api.py +2 -1
  98. lfx/components/tools/yahoo_finance.py +2 -1
  99. lfx/components/twelvelabs/video_embeddings.py +1 -1
  100. lfx/components/upstash/upstash.py +1 -1
  101. lfx/components/vectorstores/astradb_graph.py +8 -1
  102. lfx/components/vectorstores/local_db.py +1 -0
  103. lfx/components/vectorstores/weaviate.py +1 -1
  104. lfx/components/wolframalpha/wolfram_alpha_api.py +1 -1
  105. lfx/components/zep/zep.py +2 -1
  106. lfx/custom/attributes.py +1 -0
  107. lfx/graph/graph/base.py +61 -4
  108. lfx/inputs/inputs.py +1 -0
  109. lfx/log/logger.py +31 -11
  110. lfx/schema/message.py +6 -1
  111. lfx/schema/schema.py +4 -0
  112. lfx/services/__init__.py +3 -0
  113. lfx/services/mcp_composer/__init__.py +6 -0
  114. lfx/services/mcp_composer/factory.py +16 -0
  115. lfx/services/mcp_composer/service.py +599 -0
  116. lfx/services/schema.py +1 -0
  117. lfx/services/settings/auth.py +18 -15
  118. lfx/services/settings/base.py +38 -0
  119. lfx/services/settings/constants.py +4 -1
  120. lfx/services/settings/feature_flags.py +0 -1
  121. lfx/template/frontend_node/base.py +2 -0
  122. lfx/utils/image.py +1 -1
  123. {lfx_nightly-0.1.12.dev14.dist-info → lfx_nightly-0.1.12.dev15.dist-info}/METADATA +1 -1
  124. {lfx_nightly-0.1.12.dev14.dist-info → lfx_nightly-0.1.12.dev15.dist-info}/RECORD +126 -118
  125. lfx/components/datastax/astradb.py +0 -1285
  126. {lfx_nightly-0.1.12.dev14.dist-info → lfx_nightly-0.1.12.dev15.dist-info}/WHEEL +0 -0
  127. {lfx_nightly-0.1.12.dev14.dist-info → lfx_nightly-0.1.12.dev15.dist-info}/entry_points.txt +0 -0
lfx/components/tools/calculator.py CHANGED
@@ -13,11 +13,12 @@ from lfx.schema.data import Data
 
 
 class CalculatorToolComponent(LCToolComponent):
-    display_name = "Calculator [DEPRECATED]"
+    display_name = "Calculator"
     description = "Perform basic arithmetic operations on a given expression."
     icon = "calculator"
     name = "CalculatorTool"
     legacy = True
+    replacement = ["helpers.CalculatorComponent"]
 
     inputs = [
         MessageTextInput(
lfx/components/tools/python_code_structured_tool.py CHANGED
@@ -37,6 +37,7 @@ class PythonCodeStructuredTool(LCToolComponent):
     icon = "Python"
     field_order = ["name", "description", "tool_code", "return_direct", "tool_function"]
     legacy: bool = True
+    replacement = ["processing.PythonREPLComponent"]
 
     inputs = [
         MultilineInput(
lfx/components/tools/python_repl.py CHANGED
@@ -13,11 +13,12 @@ from lfx.schema.data import Data
 
 
 class PythonREPLToolComponent(LCToolComponent):
-    display_name = "Python REPL [DEPRECATED]"
+    display_name = "Python REPL"
     description = "A tool for running Python code in a REPL environment."
     name = "PythonREPLTool"
     icon = "Python"
     legacy = True
+    replacement = ["processing.PythonREPLComponent"]
 
     inputs = [
         StrInput(
lfx/components/tools/search_api.py CHANGED
@@ -11,12 +11,13 @@ from lfx.schema.data import Data
 
 
 class SearchAPIComponent(LCToolComponent):
-    display_name: str = "Search API [DEPRECATED]"
+    display_name: str = "Search API"
     description: str = "Call the searchapi.io API with result limiting"
     name = "SearchAPI"
     documentation: str = "https://www.searchapi.io/docs/google"
     icon = "SearchAPI"
     legacy = True
+    replacement = ["searchapi.SearchComponent"]
 
     inputs = [
         MessageTextInput(name="engine", display_name="Engine", value="google"),
lfx/components/tools/serp_api.py CHANGED
@@ -30,11 +30,12 @@ class SerpAPISchema(BaseModel):
 
 
 class SerpAPIComponent(LCToolComponent):
-    display_name = "Serp Search API [DEPRECATED]"
+    display_name = "Serp Search API"
     description = "Call Serp Search API with result limiting"
     name = "SerpAPI"
     icon = "SerpSearch"
     legacy = True
+    replacement = ["serpapi.Serp"]
 
     inputs = [
         SecretStrInput(name="serpapi_api_key", display_name="SerpAPI API Key", required=True),
lfx/components/tools/tavily_search_tool.py CHANGED
@@ -81,6 +81,7 @@ Note: Check 'Advanced' for all options.
     name = "TavilyAISearch"
     documentation = "https://docs.tavily.com/"
     legacy = True
+    replacement = ["tavily.TavilySearchComponent"]
 
     inputs = [
         SecretStrInput(
lfx/components/tools/wikidata_api.py CHANGED
@@ -53,11 +53,12 @@ class WikidataAPIWrapper(BaseModel):
 
 
 class WikidataAPIComponent(LCToolComponent):
-    display_name = "Wikidata API [Deprecated]"
+    display_name = "Wikidata API"
     description = "Performs a search using the Wikidata API."
     name = "WikidataAPI"
     icon = "Wikipedia"
     legacy = True
+    replacement = ["wikipedia.WikidataComponent"]
 
     inputs = [
         MultilineInput(
lfx/components/tools/wikipedia_api.py CHANGED
@@ -10,11 +10,12 @@ from lfx.schema.data import Data
 
 
 class WikipediaAPIComponent(LCToolComponent):
-    display_name = "Wikipedia API [Deprecated]"
+    display_name = "Wikipedia API"
     description = "Call Wikipedia API."
     name = "WikipediaAPI"
     icon = "Wikipedia"
     legacy = True
+    replacement = ["wikipedia.WikipediaComponent"]
 
     inputs = [
         MultilineInput(
lfx/components/tools/yahoo_finance.py CHANGED
@@ -48,12 +48,13 @@ class YahooFinanceSchema(BaseModel):
 
 
 class YfinanceToolComponent(LCToolComponent):
-    display_name = "Yahoo! Finance [DEPRECATED]"
+    display_name = "Yahoo! Finance"
     description = """Uses [yfinance](https://pypi.org/project/yfinance/) (unofficial package) \
 to access financial data and market information from Yahoo! Finance."""
     icon = "trending-up"
     name = "YahooFinanceTool"
     legacy = True
+    replacement = ["yahoosearch.YfinanceComponent"]
 
     inputs = [
         MessageTextInput(
lfx/components/twelvelabs/video_embeddings.py CHANGED
@@ -85,7 +85,7 @@ class TwelveLabsVideoEmbeddingsComponent(LCEmbeddingsModel):
     icon = "TwelveLabs"
     documentation = "https://github.com/twelvelabs-io/twelvelabs-developer-experience/blob/main/integrations/Langflow/TWELVE_LABS_COMPONENTS_README.md"
     inputs = [
-        SecretStrInput(name="api_key", display_name="API Key", required=True),
+        SecretStrInput(name="api_key", display_name="TwelveLabs API Key", required=True),
         DropdownInput(
             name="model_name",
             display_name="Model",
lfx/components/upstash/upstash.py CHANGED
@@ -27,7 +27,7 @@ class UpstashVectorStoreComponent(LCVectorStoreComponent):
         ),
         SecretStrInput(
             name="index_token",
-            display_name="Index Token",
+            display_name="Upstash Index Token",
             info="The token for the Upstash index.",
             required=True,
         ),
lfx/components/vectorstores/astradb_graph.py CHANGED
@@ -1,7 +1,7 @@
 import os
 
 import orjson
-from astrapy.admin import parse_api_endpoint
+from langchain_core.documents import Document
 
 from lfx.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store
 from lfx.helpers.data import docs_to_data
@@ -16,6 +16,7 @@ from lfx.inputs.inputs import (
     StrInput,
 )
 from lfx.schema.data import Data
+from lfx.serialization import serialize
 
 
 class AstraDBGraphVectorStoreComponent(LCVectorStoreComponent):
@@ -174,6 +175,7 @@ class AstraDBGraphVectorStoreComponent(LCVectorStoreComponent):
     @check_cached_vector_store
     def build_vector_store(self):
         try:
+            from astrapy.admin import parse_api_endpoint
             from langchain_astradb import AstraDBGraphVectorStore
             from langchain_astradb.utils.astradb import SetupMode
         except ImportError as e:
@@ -236,6 +238,11 @@ class AstraDBGraphVectorStoreComponent(LCVectorStoreComponent):
             msg = "Vector Store Inputs must be Data objects."
             raise TypeError(msg)
 
+        # Serialize metadata to handle Properties objects and other non-JSON serializable types
+        documents = [
+            Document(page_content=doc.page_content, metadata=serialize(doc.metadata, to_str=True)) for doc in documents
+        ]
+
         if documents:
             self.log(f"Adding {len(documents)} documents to the Vector Store.")
             try:
lfx/components/vectorstores/local_db.py CHANGED
@@ -22,6 +22,7 @@ class LocalDBComponent(LCVectorStoreComponent):
     name = "LocalDB"
     icon = "database"
     legacy = True
+    replacement = ["knowledgebases.KnowledgeRetrieval", "knowledgebases.KnowledgeIngestion"]
 
     inputs = [
         TabInput(
lfx/components/vectorstores/weaviate.py CHANGED
@@ -15,7 +15,7 @@ class WeaviateVectorStoreComponent(LCVectorStoreComponent):
 
     inputs = [
         StrInput(name="url", display_name="Weaviate URL", value="http://localhost:8080", required=True),
-        SecretStrInput(name="api_key", display_name="API Key", required=False),
+        SecretStrInput(name="api_key", display_name="Weaviate API Key", required=False),
         StrInput(
             name="index_name",
             display_name="Index Name",
lfx/components/wolframalpha/wolfram_alpha_api.py CHANGED
@@ -22,7 +22,7 @@ topics, delivering structured responses."""
         MultilineInput(
             name="input_value", display_name="Input Query", info="Example query: 'What is the population of France?'"
         ),
-        SecretStrInput(name="app_id", display_name="App ID", required=True),
+        SecretStrInput(name="app_id", display_name="WolframAlpha App ID", required=True),
     ]
 
     icon = "WolframAlphaAPI"
lfx/components/zep/zep.py CHANGED
@@ -9,10 +9,11 @@ class ZepChatMemory(LCChatMemoryComponent):
     name = "ZepChatMemory"
     icon = "ZepMemory"
     legacy = True
+    replacement = ["helpers.Memory"]
 
     inputs = [
         MessageTextInput(name="url", display_name="Zep URL", info="URL of the Zep instance."),
-        SecretStrInput(name="api_key", display_name="API Key", info="API Key for the Zep instance."),
+        SecretStrInput(name="api_key", display_name="Zep API Key", info="API Key for the Zep instance."),
         DropdownInput(
             name="api_base_path",
             display_name="API Base Path",
lfx/custom/attributes.py CHANGED
@@ -69,6 +69,7 @@ ATTR_FUNC_MAPPING: dict[str, Callable] = {
    "description": getattr_return_str,
    "beta": getattr_return_bool,
    "legacy": getattr_return_bool,
+   "replacement": getattr_return_list_of_str,
    "documentation": getattr_return_str,
    "priority": getattr_return_int,
    "icon": validate_icon,
lfx/graph/graph/base.py CHANGED
@@ -102,6 +102,9 @@ class Graph:
         self.vertices_to_run: set[str] = set()
         self.stop_vertex: str | None = None
         self.inactive_vertices: set = set()
+        # Conditional routing system (separate from ACTIVE/INACTIVE cycle management)
+        self.conditionally_excluded_vertices: set = set()  # Vertices excluded by conditional routing
+        self.conditional_exclusion_sources: dict[str, set[str]] = {}  # Maps source vertex -> excluded vertices
         self.edges: list[CycleEdge] = []
         self.vertices: list[Vertex] = []
         self.run_manager = RunnableVerticesManager()
@@ -966,6 +969,59 @@ class Graph:
             vertices_to_run=self.vertices_to_run,
         )
 
+    def exclude_branch_conditionally(self, vertex_id: str, output_name: str | None = None) -> None:
+        """Marks a branch as conditionally excluded (for conditional routing).
+
+        This system is separate from the ACTIVE/INACTIVE state used for cycle management:
+        - ACTIVE/INACTIVE: Reset after each cycle iteration to allow cycles to continue
+        - Conditional exclusion: Persists until explicitly cleared by the same source vertex
+
+        Used by ConditionalRouter to ensure only one branch executes per condition evaluation.
+        If this vertex has previously excluded branches, they are cleared first to allow
+        re-evaluation on subsequent iterations (e.g., in cycles where condition may change).
+
+        Args:
+            vertex_id: The source vertex making the exclusion decision
+            output_name: The output name to follow when excluding downstream vertices
+        """
+        # Clear any previous exclusions from this source vertex
+        if vertex_id in self.conditional_exclusion_sources:
+            previous_exclusions = self.conditional_exclusion_sources[vertex_id]
+            self.conditionally_excluded_vertices -= previous_exclusions
+            del self.conditional_exclusion_sources[vertex_id]
+
+        # Now exclude the new branch
+        visited: set[str] = set()
+        excluded: set[str] = set()
+        self._exclude_branch_conditionally(vertex_id, visited, excluded, output_name, skip_first=True)
+
+        # Track which vertices this source excluded
+        if excluded:
+            self.conditional_exclusion_sources[vertex_id] = excluded
+
+    def _exclude_branch_conditionally(
+        self, vertex_id: str, visited: set, excluded: set, output_name: str | None = None, *, skip_first: bool = False
+    ) -> None:
+        """Recursively excludes vertices in a branch for conditional routing."""
+        if vertex_id in visited:
+            return
+        visited.add(vertex_id)
+
+        # Don't exclude the first vertex (the router itself)
+        if not skip_first:
+            self.conditionally_excluded_vertices.add(vertex_id)
+            excluded.add(vertex_id)
+
+        for child_id in self.parent_child_map[vertex_id]:
+            # If we're at the router (skip_first=True) and have an output_name,
+            # only follow edges from that specific output
+            if skip_first and output_name:
+                edge = self.get_edge(vertex_id, child_id)
+                if edge and edge.source_handle.name != output_name:
+                    continue
+            # After the first level, exclude all descendants
+            self._exclude_branch_conditionally(child_id, visited, excluded, output_name=None, skip_first=False)
+
     def get_edge(self, source_id: str, target_id: str) -> CycleEdge | None:
         """Returns the edge between two vertices."""
         for edge in self.edges:
@@ -2135,6 +2191,9 @@ class Graph:
 
     def is_vertex_runnable(self, vertex_id: str) -> bool:
         """Returns whether a vertex is runnable."""
+        # Check if vertex is conditionally excluded (for conditional routing)
+        if vertex_id in self.conditionally_excluded_vertices:
+            return False
         is_active = self.get_vertex(vertex_id).is_active()
         is_loop = self.get_vertex(vertex_id).is_loop
         return self.run_manager.is_vertex_runnable(vertex_id, is_active=is_active, is_loop=is_loop)
@@ -2167,10 +2226,8 @@ class Graph:
         if predecessor_id in visited:
             return
         visited.add(predecessor_id)
-        predecessor_vertex = self.get_vertex(predecessor_id)
-        is_active = predecessor_vertex.is_active()
-        is_loop = predecessor_vertex.is_loop
-        if self.run_manager.is_vertex_runnable(predecessor_id, is_active=is_active, is_loop=is_loop):
+
+        if self.is_vertex_runnable(predecessor_id):
             runnable_vertices.append(predecessor_id)
         else:
             for pred_pred_id in self.run_manager.run_predecessors.get(predecessor_id, []):
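
To make the routing change concrete, the toy sketch below reimplements just the exclusion bookkeeping from the hunks above on a string-keyed graph (plain tuples instead of lfx's Vertex/CycleEdge objects, and no RunnableVerticesManager): excluding the branch behind one router output leaves the other branch runnable, and a later call from the same router clears the earlier exclusion before applying the new one.

    # Toy model of the conditional-exclusion bookkeeping added to Graph above.
    class ToyGraph:
        def __init__(self, edges):
            self.edges = edges  # (source_id, target_id, output_name) tuples
            self.parent_child_map = {}
            for src, dst, _ in edges:
                self.parent_child_map.setdefault(src, []).append(dst)
                self.parent_child_map.setdefault(dst, [])
            self.conditionally_excluded_vertices = set()
            self.conditional_exclusion_sources = {}

        def exclude_branch_conditionally(self, vertex_id, output_name=None):
            # Clear previous exclusions from the same router so the condition can be re-evaluated.
            previous = self.conditional_exclusion_sources.pop(vertex_id, set())
            self.conditionally_excluded_vertices -= previous
            excluded = set()
            self._exclude(vertex_id, set(), excluded, output_name, skip_first=True)
            if excluded:
                self.conditional_exclusion_sources[vertex_id] = excluded

        def _exclude(self, vertex_id, visited, excluded, output_name, *, skip_first):
            if vertex_id in visited:
                return
            visited.add(vertex_id)
            if not skip_first:  # never exclude the router itself
                self.conditionally_excluded_vertices.add(vertex_id)
                excluded.add(vertex_id)
            for child in self.parent_child_map[vertex_id]:
                if skip_first and output_name:
                    handle = next(h for s, d, h in self.edges if s == vertex_id and d == child)
                    if handle != output_name:
                        continue  # only descend through the named router output
                self._exclude(child, visited, excluded, None, skip_first=False)

        def is_vertex_runnable(self, vertex_id):
            # Mirrors the new early return in Graph.is_vertex_runnable.
            return vertex_id not in self.conditionally_excluded_vertices

    graph = ToyGraph([("router", "on_true", "true"), ("router", "on_false", "false")])
    graph.exclude_branch_conditionally("router", output_name="false")  # drop the false branch
    print(graph.is_vertex_runnable("on_true"), graph.is_vertex_runnable("on_false"))  # True False
    graph.exclude_branch_conditionally("router", output_name="true")   # next iteration flips
    print(graph.is_vertex_runnable("on_true"), graph.is_vertex_runnable("on_false"))  # False True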
lfx/inputs/inputs.py CHANGED
@@ -493,6 +493,7 @@ class DropdownInput(BaseInputMixin, DropDownMixin, MetadataTraceMixin, ToolModeM
     options_metadata: list[dict[str, Any]] = Field(default_factory=list)
     combobox: CoalesceBool = False
     dialog_inputs: dict[str, Any] = Field(default_factory=dict)
+    external_options: dict[str, Any] = Field(default_factory=dict)
     toggle: bool = False
     toggle_disable: bool | None = None
     toggle_value: bool | None = None
lfx/log/logger.py CHANGED
@@ -218,9 +218,11 @@ def configure(
     if os.getenv("LANGFLOW_LOG_LEVEL", "").upper() in VALID_LOG_LEVELS and log_level is None:
         log_level = os.getenv("LANGFLOW_LOG_LEVEL")
 
-    requested_min_level = LOG_LEVEL_MAP.get(
-        (log_level or os.getenv("LANGFLOW_LOG_LEVEL", "ERROR")).upper(), logging.ERROR
-    )
+    log_level_str = os.getenv("LANGFLOW_LOG_LEVEL", "ERROR")
+    if log_level is not None:
+        log_level_str = log_level
+
+    requested_min_level = LOG_LEVEL_MAP.get(log_level_str.upper(), logging.ERROR)
     if current_min_level == requested_min_level:
         return
 
@@ -243,20 +245,38 @@
         structlog.contextvars.merge_contextvars,
         structlog.processors.add_log_level,
         structlog.processors.TimeStamper(fmt="iso"),
-        add_serialized,
-        remove_exception_in_production,
-        buffer_writer,
     ]
 
+    # Add callsite information only when LANGFLOW_DEV is set
+    if DEV:
+        processors.append(
+            structlog.processors.CallsiteParameterAdder(
+                parameters=[
+                    structlog.processors.CallsiteParameter.FILENAME,
+                    structlog.processors.CallsiteParameter.FUNC_NAME,
+                    structlog.processors.CallsiteParameter.LINENO,
+                ]
+            )
+        )
+
+    processors.extend(
+        [
+            add_serialized,
+            remove_exception_in_production,
+            buffer_writer,
+        ]
+    )
+
     # Configure output based on environment
     if log_env.lower() == "container" or log_env.lower() == "container_json":
         processors.append(structlog.processors.JSONRenderer())
     elif log_env.lower() == "container_csv":
-        processors.append(
-            structlog.processors.KeyValueRenderer(
-                key_order=["timestamp", "level", "module", "event"], drop_missing=True
-            )
-        )
+        # Include callsite fields in key order when DEV is enabled
+        key_order = ["timestamp", "level", "event"]
+        if DEV:
+            key_order += ["filename", "func_name", "lineno"]
+
+        processors.append(structlog.processors.KeyValueRenderer(key_order=key_order, drop_missing=True))
     else:
         # Use rich console for pretty printing based on environment variable
         log_stdout_pretty = os.getenv("LANGFLOW_PRETTY_LOGS", "true").lower() == "true"
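
Outside the diff, the reordered processor chain amounts to the sketch below. It assumes only that structlog is installed; DEV stands in for the module-level flag lfx derives from LANGFLOW_DEV, and the renderer mirrors the container_csv branch.

    import os

    import structlog

    # Stand-in for the module-level DEV flag (derived from LANGFLOW_DEV in lfx.log.logger).
    DEV = os.getenv("LANGFLOW_DEV", "false").lower() == "true"

    processors = [
        structlog.contextvars.merge_contextvars,
        structlog.processors.add_log_level,
        structlog.processors.TimeStamper(fmt="iso"),
    ]

    # Callsite info (file, function, line) is only collected in dev mode, as in the diff.
    if DEV:
        processors.append(
            structlog.processors.CallsiteParameterAdder(
                parameters=[
                    structlog.processors.CallsiteParameter.FILENAME,
                    structlog.processors.CallsiteParameter.FUNC_NAME,
                    structlog.processors.CallsiteParameter.LINENO,
                ]
            )
        )

    # Key/value renderer with the callsite columns appended only when DEV is set.
    key_order = ["timestamp", "level", "event"]
    if DEV:
        key_order += ["filename", "func_name", "lineno"]
    processors.append(structlog.processors.KeyValueRenderer(key_order=key_order, drop_missing=True))

    structlog.configure(processors=processors)
    structlog.get_logger().info("logging configured", mode="dev" if DEV else "prod")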
lfx/schema/message.py CHANGED
@@ -11,7 +11,7 @@ from uuid import UUID
 
 from fastapi.encoders import jsonable_encoder
 from langchain_core.load import load
-from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, SystemMessage
+from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, SystemMessage, ToolMessage
 from langchain_core.prompts.chat import BaseChatPromptTemplate, ChatPromptTemplate
 from langchain_core.prompts.prompt import PromptTemplate
 from pydantic import BaseModel, ConfigDict, Field, ValidationError, field_serializer, field_validator
@@ -158,6 +158,9 @@ class Message(Data):
        elif lc_message.type == "system":
            sender = "System"
            sender_name = "System"
+       elif lc_message.type == "tool":
+           sender = "Tool"
+           sender_name = "Tool"
        else:
            sender = lc_message.type
            sender_name = lc_message.type
@@ -222,6 +225,8 @@ class Message(Data):
                messages.append(SystemMessage(content=message.get("content")))
            case _ if message.get("type") == "ai":
                messages.append(AIMessage(content=message.get("content")))
+           case _ if message.get("type") == "tool":
+               messages.append(ToolMessage(content=message.get("content")))
 
        self.prompt["kwargs"]["messages"] = messages
        return load(self.prompt)
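
For reference, the new branch maps LangChain's "tool" message type onto a "Tool" sender. A small hedged sketch, using langchain_core directly and covering only the branches visible in this hunk (note that ToolMessage requires a tool_call_id when built by hand):

    from langchain_core.messages import SystemMessage, ToolMessage

    def sender_for(lc_message) -> str:
        # Only the branches shown in the hunk: system, the new tool case, and the fallback.
        if lc_message.type == "system":
            return "System"
        if lc_message.type == "tool":
            return "Tool"  # new in 0.1.12.dev15
        return lc_message.type

    print(sender_for(SystemMessage(content="You are a helpful assistant.")))  # System
    print(sender_for(ToolMessage(content="4", tool_call_id="call-1")))        # Tool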
lfx/schema/schema.py CHANGED
@@ -2,6 +2,7 @@ from collections.abc import Generator
 from enum import Enum
 from typing import TYPE_CHECKING, Literal
 
+from pandas import Series
 from pydantic import BaseModel, ConfigDict, Field
 from typing_extensions import TypedDict
 
@@ -84,6 +85,9 @@ def get_message(payload):
     if message is None and isinstance(payload, dict | str | Data):
         message = payload.data if isinstance(payload, Data) else payload
 
+    if isinstance(message, Series):
+        return message if not message.empty else payload
+
     return message or payload
 
 
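The guard matters because a pandas Series refuses plain truth-value testing, so the old `message or payload` return would raise instead of handing back the Series. A standalone sketch of the changed return path, assuming pandas is available:

    import pandas as pd

    # Stand-in for lfx.schema.schema.get_message, reduced to the changed return path.
    def get_message_like(message, payload):
        if isinstance(message, pd.Series):
            # A Series cannot be used in a boolean expression; return it explicitly.
            return message if not message.empty else payload
        return message or payload

    print(get_message_like(pd.Series(["hello", "world"]), payload="fallback").tolist())  # ['hello', 'world']
    print(get_message_like(pd.Series([], dtype=object), payload="fallback"))             # fallback
    print(get_message_like(None, payload="fallback"))                                    # fallback
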
lfx/services/__init__.py CHANGED
@@ -8,12 +8,15 @@ from .interfaces import (
     VariableServiceProtocol,
 )
 from .manager import ServiceManager
+from .mcp_composer import MCPComposerService, MCPComposerServiceFactory
 from .session import NoopSession
 
 __all__ = [
     "CacheServiceProtocol",
     "ChatServiceProtocol",
     "DatabaseServiceProtocol",
+    "MCPComposerService",
+    "MCPComposerServiceFactory",
     "NoopSession",
     "ServiceManager",
     "SettingsServiceProtocol",
lfx/services/mcp_composer/__init__.py ADDED
@@ -0,0 +1,6 @@
+"""MCP Composer service for Langflow."""
+
+from lfx.services.mcp_composer.factory import MCPComposerServiceFactory
+from lfx.services.mcp_composer.service import MCPComposerService
+
+__all__ = ["MCPComposerService", "MCPComposerServiceFactory"]
lfx/services/mcp_composer/factory.py ADDED
@@ -0,0 +1,16 @@
+"""Factory for creating MCP Composer service instances."""
+
+from lfx.services.factory import ServiceFactory
+from lfx.services.mcp_composer.service import MCPComposerService
+
+
+class MCPComposerServiceFactory(ServiceFactory):
+    """Factory for creating MCP Composer service instances."""
+
+    def __init__(self):
+        super().__init__()
+        self.service_class = MCPComposerService
+
+    def create(self, **kwargs):  # noqa: ARG002
+        """Create a new MCP Composer service instance."""
+        return MCPComposerService()
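
Going by the two new modules above, the factory is the piece a service manager would register. A minimal usage sketch, assuming an lfx build that ships these modules is installed (the service's own behavior lives in the 599-line service.py, not shown here):

    # Hypothetical wiring based only on the new modules shown above.
    from lfx.services.mcp_composer import MCPComposerService, MCPComposerServiceFactory

    factory = MCPComposerServiceFactory()
    service = factory.create()  # extra kwargs are accepted and ignored (hence the ARG002 suppression)

    assert isinstance(service, MCPComposerService)
    assert factory.service_class is MCPComposerService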