lfx-nightly 0.1.12.dev13__py3-none-any.whl → 0.1.12.dev15__py3-none-any.whl
This diff shows the changes between two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
Potentially problematic release.
- lfx/base/agents/events.py +40 -29
- lfx/base/constants.py +1 -1
- lfx/base/data/docling_utils.py +43 -8
- lfx/base/data/utils.py +3 -3
- lfx/base/knowledge_bases/__init__.py +3 -0
- lfx/base/knowledge_bases/knowledge_base_utils.py +137 -0
- lfx/base/models/anthropic_constants.py +3 -1
- lfx/base/models/model_input_constants.py +1 -1
- lfx/base/vectorstores/vector_store_connection_decorator.py +1 -1
- lfx/components/agentql/agentql_api.py +1 -1
- lfx/components/agents/agent.py +62 -17
- lfx/components/agents/mcp_component.py +11 -1
- lfx/components/aiml/aiml.py +4 -1
- lfx/components/amazon/amazon_bedrock_converse.py +196 -0
- lfx/components/amazon/amazon_bedrock_model.py +5 -1
- lfx/components/azure/azure_openai.py +1 -1
- lfx/components/azure/azure_openai_embeddings.py +1 -1
- lfx/components/clickhouse/clickhouse.py +1 -1
- lfx/components/confluence/confluence.py +1 -1
- lfx/components/crewai/crewai.py +1 -0
- lfx/components/crewai/hierarchical_crew.py +1 -0
- lfx/components/crewai/hierarchical_task.py +1 -0
- lfx/components/crewai/sequential_crew.py +1 -0
- lfx/components/crewai/sequential_task.py +1 -0
- lfx/components/crewai/sequential_task_agent.py +1 -0
- lfx/components/data/api_request.py +13 -3
- lfx/components/data/csv_to_data.py +1 -0
- lfx/components/data/file.py +71 -25
- lfx/components/data/json_to_data.py +1 -0
- lfx/components/datastax/astra_db.py +2 -1
- lfx/components/datastax/astra_vectorize.py +3 -5
- lfx/components/datastax/astradb_tool.py +5 -1
- lfx/components/datastax/astradb_vectorstore.py +8 -1
- lfx/components/deactivated/chat_litellm_model.py +1 -1
- lfx/components/deactivated/metal.py +1 -1
- lfx/components/docling/docling_inline.py +23 -9
- lfx/components/elastic/elasticsearch.py +1 -1
- lfx/components/elastic/opensearch.py +1 -1
- lfx/components/embeddings/similarity.py +1 -0
- lfx/components/embeddings/text_embedder.py +1 -0
- lfx/components/firecrawl/firecrawl_crawl_api.py +1 -1
- lfx/components/firecrawl/firecrawl_extract_api.py +1 -1
- lfx/components/firecrawl/firecrawl_map_api.py +1 -1
- lfx/components/firecrawl/firecrawl_scrape_api.py +1 -1
- lfx/components/google/gmail.py +1 -0
- lfx/components/google/google_generative_ai_embeddings.py +1 -1
- lfx/components/helpers/memory.py +8 -6
- lfx/components/helpers/output_parser.py +1 -0
- lfx/components/helpers/store_message.py +1 -0
- lfx/components/huggingface/huggingface.py +3 -1
- lfx/components/huggingface/huggingface_inference_api.py +1 -1
- lfx/components/ibm/watsonx.py +1 -1
- lfx/components/ibm/watsonx_embeddings.py +1 -1
- lfx/components/icosacomputing/combinatorial_reasoner.py +1 -1
- lfx/components/input_output/chat.py +0 -27
- lfx/components/input_output/chat_output.py +3 -27
- lfx/components/knowledge_bases/__init__.py +34 -0
- lfx/components/knowledge_bases/ingestion.py +686 -0
- lfx/components/knowledge_bases/retrieval.py +256 -0
- lfx/components/langchain_utilities/langchain_hub.py +1 -1
- lfx/components/langwatch/langwatch.py +1 -1
- lfx/components/logic/conditional_router.py +40 -3
- lfx/components/logic/data_conditional_router.py +1 -0
- lfx/components/logic/flow_tool.py +2 -1
- lfx/components/logic/pass_message.py +1 -0
- lfx/components/logic/sub_flow.py +2 -1
- lfx/components/milvus/milvus.py +1 -1
- lfx/components/olivya/olivya.py +1 -1
- lfx/components/processing/alter_metadata.py +1 -0
- lfx/components/processing/combine_text.py +1 -0
- lfx/components/processing/create_data.py +1 -0
- lfx/components/processing/data_to_dataframe.py +1 -0
- lfx/components/processing/extract_key.py +1 -0
- lfx/components/processing/filter_data.py +1 -0
- lfx/components/processing/filter_data_values.py +1 -0
- lfx/components/processing/json_cleaner.py +1 -0
- lfx/components/processing/merge_data.py +1 -0
- lfx/components/processing/message_to_data.py +1 -0
- lfx/components/processing/parse_data.py +1 -0
- lfx/components/processing/parse_dataframe.py +1 -0
- lfx/components/processing/parse_json_data.py +1 -0
- lfx/components/processing/regex.py +1 -0
- lfx/components/processing/select_data.py +1 -0
- lfx/components/processing/structured_output.py +7 -3
- lfx/components/processing/update_data.py +1 -0
- lfx/components/prototypes/__init__.py +8 -7
- lfx/components/qdrant/qdrant.py +1 -1
- lfx/components/redis/redis_chat.py +1 -1
- lfx/components/tools/__init__.py +0 -6
- lfx/components/tools/calculator.py +2 -1
- lfx/components/tools/python_code_structured_tool.py +1 -0
- lfx/components/tools/python_repl.py +2 -1
- lfx/components/tools/search_api.py +2 -1
- lfx/components/tools/serp_api.py +2 -1
- lfx/components/tools/tavily_search_tool.py +1 -0
- lfx/components/tools/wikidata_api.py +2 -1
- lfx/components/tools/wikipedia_api.py +2 -1
- lfx/components/tools/yahoo_finance.py +2 -1
- lfx/components/twelvelabs/video_embeddings.py +1 -1
- lfx/components/upstash/upstash.py +1 -1
- lfx/components/vectorstores/astradb_graph.py +8 -1
- lfx/components/vectorstores/local_db.py +1 -0
- lfx/components/vectorstores/weaviate.py +1 -1
- lfx/components/wolframalpha/wolfram_alpha_api.py +1 -1
- lfx/components/zep/zep.py +2 -1
- lfx/custom/attributes.py +1 -0
- lfx/graph/graph/base.py +61 -4
- lfx/inputs/inputs.py +1 -0
- lfx/log/logger.py +31 -11
- lfx/schema/message.py +6 -1
- lfx/schema/schema.py +4 -0
- lfx/services/__init__.py +3 -0
- lfx/services/mcp_composer/__init__.py +6 -0
- lfx/services/mcp_composer/factory.py +16 -0
- lfx/services/mcp_composer/service.py +599 -0
- lfx/services/schema.py +1 -0
- lfx/services/settings/auth.py +18 -15
- lfx/services/settings/base.py +38 -0
- lfx/services/settings/constants.py +4 -1
- lfx/services/settings/feature_flags.py +0 -1
- lfx/template/frontend_node/base.py +2 -0
- lfx/utils/image.py +1 -1
- {lfx_nightly-0.1.12.dev13.dist-info → lfx_nightly-0.1.12.dev15.dist-info}/METADATA +1 -1
- {lfx_nightly-0.1.12.dev13.dist-info → lfx_nightly-0.1.12.dev15.dist-info}/RECORD +126 -118
- lfx/components/datastax/astradb.py +0 -1285
- {lfx_nightly-0.1.12.dev13.dist-info → lfx_nightly-0.1.12.dev15.dist-info}/WHEEL +0 -0
- {lfx_nightly-0.1.12.dev13.dist-info → lfx_nightly-0.1.12.dev15.dist-info}/entry_points.txt +0 -0
lfx/services/mcp_composer/service.py ADDED
@@ -0,0 +1,599 @@
+"""MCP Composer service for proxying and orchestrating MCP servers."""
+
+import asyncio
+import os
+import re
+import select
+import socket
+import subprocess
+from collections.abc import Callable
+from functools import wraps
+from typing import Any
+
+from lfx.log.logger import logger
+from lfx.services.base import Service
+from lfx.services.deps import get_settings_service
+
+GENERIC_STARTUP_ERROR_MSG = (
+    "MCP Composer startup failed. Check OAuth configuration and check logs for more information."
+)
+
+
+class MCPComposerError(Exception):
+    """Base exception for MCP Composer errors."""
+
+    def __init__(self, message: str | None, project_id: str | None = None):
+        if not message:
+            message = GENERIC_STARTUP_ERROR_MSG
+        self.message = message
+        self.project_id = project_id
+        super().__init__(message)
+
+
+class MCPComposerPortError(MCPComposerError):
+    """Port is already in use or unavailable."""
+
+
+class MCPComposerConfigError(MCPComposerError):
+    """Invalid configuration provided."""
+
+
+class MCPComposerDisabledError(MCPComposerError):
+    """MCP Composer is disabled in settings."""
+
+
+class MCPComposerStartupError(MCPComposerError):
+    """Failed to start MCP Composer process."""
+
+
+def require_composer_enabled(func: Callable) -> Callable:
+    """Decorator that checks if MCP Composer is enabled before executing the method."""
+
+    @wraps(func)
+    def wrapper(self, *args, **kwargs):
+        if not get_settings_service().settings.mcp_composer_enabled:
+            project_id = kwargs.get("project_id")
+            error_msg = "MCP Composer is disabled in settings"
+            raise MCPComposerDisabledError(error_msg, project_id)
+
+        return func(self, *args, **kwargs)
+
+    return wrapper
+
+
+class MCPComposerService(Service):
+    """Service for managing per-project MCP Composer instances."""
+
+    name = "mcp_composer_service"
+
+    def __init__(self):
+        super().__init__()
+        self.project_composers: dict[str, dict] = {}  # project_id -> {process, host, port, sse_url, auth_config}
+        self._start_locks: dict[
+            str, asyncio.Lock
+        ] = {}  # Lock to prevent concurrent start operations for the same project
+
+    def _is_port_available(self, port: int) -> bool:
+        """Check if a port is available by trying to bind to it."""
+        try:
+            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
+                sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+                sock.bind(("0.0.0.0", port))
+                return True  # Port is available
+        except OSError:
+            return False  # Port is in use/bound
+
+    async def start(self):
+        """Check if the MCP Composer service is enabled."""
+        settings = get_settings_service().settings
+        if not settings.mcp_composer_enabled:
+            await logger.adebug(
+                "MCP Composer is disabled in settings. OAuth authentication will not be enabled for MCP Servers."
+            )
+        else:
+            await logger.adebug(
+                "MCP Composer is enabled in settings. OAuth authentication will be enabled for MCP Servers."
+            )
+
+    async def stop(self):
+        """Stop all MCP Composer instances."""
+        for project_id in list(self.project_composers.keys()):
+            await self.stop_project_composer(project_id)
+        await logger.adebug("All MCP Composer instances stopped")
+
+    @require_composer_enabled
+    async def stop_project_composer(self, project_id: str):
+        """Stop the MCP Composer instance for a specific project."""
+        if project_id not in self.project_composers:
+            return
+
+        # Use the same lock to ensure consistency
+        if project_id in self._start_locks:
+            async with self._start_locks[project_id]:
+                await self._do_stop_project_composer(project_id)
+            # Clean up the lock as well
+            del self._start_locks[project_id]
+        else:
+            # Fallback if no lock exists
+            await self._do_stop_project_composer(project_id)
+
+    async def _do_stop_project_composer(self, project_id: str):
+        """Internal method to stop a project composer."""
+        if project_id not in self.project_composers:
+            return
+
+        composer_info = self.project_composers[project_id]
+        process = composer_info.get("process")
+
+        if process:
+            try:
+                # Check if process is still running before trying to terminate
+                if process.poll() is None:
+                    await logger.adebug(f"Terminating MCP Composer process {process.pid} for project {project_id}")
+                    process.terminate()
+
+                    # Wait longer for graceful shutdown
+                    try:
+                        await asyncio.wait_for(self._wait_for_process_exit(process), timeout=3.0)
+                        await logger.adebug(f"MCP Composer for project {project_id} terminated gracefully")
+                    except asyncio.TimeoutError:
+                        await logger.aerror(
+                            f"MCP Composer for project {project_id} did not terminate gracefully, force killing"
+                        )
+                        process.kill()
+                        # Wait a bit more for force kill to complete
+                        try:
+                            await asyncio.wait_for(self._wait_for_process_exit(process), timeout=2.0)
+                        except asyncio.TimeoutError:
+                            await logger.aerror(
+                                f"Failed to kill MCP Composer process {process.pid} for project {project_id}"
+                            )
+                else:
+                    await logger.adebug(f"MCP Composer process for project {project_id} was already terminated")
+
+                await logger.adebug(f"MCP Composer stopped for project {project_id}")
+
+            except ProcessLookupError:
+                # Process already terminated
+                await logger.adebug(f"MCP Composer process for project {project_id} was already terminated")
+            except Exception as e:  # noqa: BLE001
+                await logger.aerror(f"Error stopping MCP Composer for project {project_id}: {e}")
+
+        # Remove from tracking
+        del self.project_composers[project_id]
+
+    async def _wait_for_process_exit(self, process):
+        """Wait for a process to exit."""
+        await asyncio.to_thread(process.wait)
+
+    def _validate_oauth_settings(self, auth_config: dict[str, Any]) -> None:
+        """Validate that all required OAuth settings are present and non-empty.
+
+        Raises:
+            MCPComposerConfigError: If any required OAuth field is missing or empty
+        """
+        if auth_config.get("auth_type") != "oauth":
+            return
+
+        required_fields = [
+            "oauth_host",
+            "oauth_port",
+            "oauth_server_url",
+            "oauth_auth_url",
+            "oauth_token_url",
+            "oauth_client_id",
+            "oauth_client_secret",
+        ]
+
+        missing_fields = []
+        empty_fields = []
+
+        for field in required_fields:
+            value = auth_config.get(field)
+            if value is None:
+                missing_fields.append(field)
+            elif not str(value).strip():
+                empty_fields.append(field)
+
+        error_parts = []
+        if missing_fields:
+            error_parts.append(f"Missing required fields: {', '.join(missing_fields)}")
+        if empty_fields:
+            error_parts.append(f"Empty required fields: {', '.join(empty_fields)}")
+
+        if error_parts:
+            config_error_msg = f"Invalid OAuth configuration: {'; '.join(error_parts)}"
+            raise MCPComposerConfigError(config_error_msg)
+
+    def _has_auth_config_changed(self, existing_auth: dict[str, Any] | None, new_auth: dict[str, Any] | None) -> bool:
+        """Check if auth configuration has changed in a way that requires restart."""
+        if not existing_auth and not new_auth:
+            return False
+
+        if not existing_auth or not new_auth:
+            return True
+
+        auth_type = new_auth.get("auth_type", "")
+
+        # Auth type changed?
+        if existing_auth.get("auth_type") != auth_type:
+            return True
+
+        # Define which fields to check for each auth type
+        fields_to_check = []
+        if auth_type == "oauth":
+            # Get all oauth_* fields plus host/port from both configs
+            all_keys = set(existing_auth.keys()) | set(new_auth.keys())
+            fields_to_check = [k for k in all_keys if k.startswith("oauth_") or k in ["host", "port"]]
+        elif auth_type == "apikey":
+            fields_to_check = ["api_key"]
+
+        # Compare relevant fields
+        for field in fields_to_check:
+            old_val = existing_auth.get(field)
+            new_val = new_auth.get(field)
+
+            # Convert None and empty string to None for comparison
+            old_normalized = None if (old_val is None or old_val == "") else old_val
+            new_normalized = None if (new_val is None or new_val == "") else new_val
+
+            if old_normalized != new_normalized:
+                return True
+
+        return False
+
+    def _obfuscate_command_secrets(self, cmd: list[str]) -> list[str]:
+        """Obfuscate secrets in command arguments for safe logging.
+
+        Args:
+            cmd: List of command arguments
+
+        Returns:
+            List of command arguments with secrets replaced with ***REDACTED***
+        """
+        safe_cmd = []
+        skip_next = False
+
+        for i, arg in enumerate(cmd):
+            if skip_next:
+                skip_next = False
+                safe_cmd.append("***REDACTED***")
+                continue
+
+            if arg == "--env" and i + 2 < len(cmd):
+                # Check if next env var is a secret
+                env_key = cmd[i + 1]
+                if any(secret in env_key.lower() for secret in ["secret", "key", "token"]):
+                    safe_cmd.extend([arg, env_key])  # Keep env key, redact value
+                    skip_next = True
+                    continue
+
+            safe_cmd.append(arg)
+
+        return safe_cmd
+
+    def _extract_error_message(
+        self, stdout_content: str, stderr_content: str, oauth_server_url: str | None = None
+    ) -> str:
+        """Attempts to extract a user-friendly error message from subprocess output.
+
+        Args:
+            stdout_content: Standard output from the subprocess
+            stderr_content: Standard error from the subprocess
+            oauth_server_url: OAuth server URL
+
+        Returns:
+            User-friendly error message or a generic message if no specific pattern is found
+        """
+        # Combine both outputs and clean them up
+        combined_output = (stderr_content + "\n" + stdout_content).strip()
+        if not oauth_server_url:
+            oauth_server_url = "OAuth server URL"
+
+        # Common error patterns with user-friendly messages
+        error_patterns = [
+            (r"address already in use", f"Address {oauth_server_url} is already in use."),
+            (r"permission denied", f"Permission denied starting MCP Composer on address {oauth_server_url}."),
+            (
+                r"connection refused",
+                f"Connection refused on address {oauth_server_url}. The address may be blocked or unavailable.",
+            ),
+            (
+                r"bind.*failed",
+                f"Failed to bind to address {oauth_server_url}. The address may be in use or unavailable.",
+            ),
+            (r"timeout", "MCP Composer startup timed out. Please try again."),
+            (r"invalid.*configuration", "Invalid MCP Composer configuration. Please check your settings."),
+            (r"oauth.*error", "OAuth configuration error. Please check your OAuth settings."),
+            (r"authentication.*failed", "Authentication failed. Please check your credentials."),
+        ]
+
+        # Check for specific error patterns first
+        for pattern, friendly_msg in error_patterns:
+            if re.search(pattern, combined_output, re.IGNORECASE):
+                return friendly_msg
+
+        return GENERIC_STARTUP_ERROR_MSG
+
+    @require_composer_enabled
+    async def start_project_composer(
+        self,
+        project_id: str,
+        sse_url: str,
+        auth_config: dict[str, Any] | None,
+        max_startup_checks: int = 5,
+        startup_delay: float = 2.0,
+    ) -> None:
+        """Start an MCP Composer instance for a specific project.
+
+        Raises:
+            MCPComposerError: Various specific errors if startup fails
+        """
+        if not auth_config:
+            no_auth_error_msg = "No auth settings provided"
+            raise MCPComposerConfigError(no_auth_error_msg, project_id)
+
+        # Validate OAuth settings early to provide clear error messages
+        self._validate_oauth_settings(auth_config)
+
+        project_host = auth_config.get("oauth_host") if auth_config else "unknown"
+        project_port = auth_config.get("oauth_port") if auth_config else "unknown"
+        await logger.adebug(f"Starting MCP Composer for project {project_id} on {project_host}:{project_port}")
+
+        # Use a per-project lock to prevent race conditions
+        if project_id not in self._start_locks:
+            self._start_locks[project_id] = asyncio.Lock()
+
+        async with self._start_locks[project_id]:
+            # Check if already running (double-check after acquiring lock)
+            project_port_str = auth_config.get("oauth_port")
+            if not project_port_str:
+                no_port_error_msg = "No OAuth port provided"
+                raise MCPComposerConfigError(no_port_error_msg, project_id)
+
+            try:
+                project_port = int(project_port_str)
+            except (ValueError, TypeError) as e:
+                port_error_msg = f"Invalid OAuth port: {project_port_str}"
+                raise MCPComposerConfigError(port_error_msg, project_id) from e
+
+            project_host = auth_config.get("oauth_host")
+            if not project_host:
+                no_host_error_msg = "No OAuth host provided"
+                raise MCPComposerConfigError(no_host_error_msg, project_id)
+
+            if project_id in self.project_composers:
+                composer_info = self.project_composers[project_id]
+                process = composer_info.get("process")
+                existing_auth = composer_info.get("auth_config", {})
+
+                # Check if process is still running
+                if process and process.poll() is None:
+                    # Process is running - only restart if config changed
+                    auth_changed = self._has_auth_config_changed(existing_auth, auth_config)
+                    if auth_changed:
+                        await logger.adebug(f"Config changed for project {project_id}, restarting MCP Composer")
+                        await self._do_stop_project_composer(project_id)
+                    else:
+                        await logger.adebug(
+                            f"MCP Composer already running for project {project_id} with current config"
+                        )
+                        return  # Already running with correct config
+                else:
+                    # Process died or never started properly, restart it
+                    await logger.adebug(f"MCP Composer process died for project {project_id}, restarting")
+                    await self._do_stop_project_composer(project_id)
+
+            is_port_available = self._is_port_available(project_port)
+            if not is_port_available:
+                await logger.awarning(f"Port {project_port} is already in use.")
+                port_error_msg = f"Port {project_port} is already in use"
+                raise MCPComposerPortError(port_error_msg)
+
+            # Start the MCP Composer process (single attempt, no outer retry loop)
+            process = await self._start_project_composer_process(
+                project_id, project_host, project_port, sse_url, auth_config, max_startup_checks, startup_delay
+            )
+            self.project_composers[project_id] = {
+                "process": process,
+                "host": project_host,
+                "port": project_port,
+                "sse_url": sse_url,
+                "auth_config": auth_config,
+            }
+
+            await logger.adebug(
+                f"MCP Composer started for project {project_id} on port {project_port} (PID: {process.pid})"
+            )
+
+    async def _start_project_composer_process(
+        self,
+        project_id: str,
+        host: str,
+        port: int,
+        sse_url: str,
+        auth_config: dict[str, Any] | None = None,
+        max_startup_checks: int = 5,
+        startup_delay: float = 2.0,
+    ) -> subprocess.Popen:
+        """Start the MCP Composer subprocess for a specific project."""
+        cmd = [
+            "uvx",
+            "mcp-composer",
+            "--mode",
+            "sse",
+            "--sse-url",
+            sse_url,
+            "--disable-composer-tools",
+        ]
+
+        # Set environment variables
+        env = os.environ.copy()
+
+        oauth_server_url = auth_config.get("oauth_server_url") if auth_config else None
+        if auth_config:
+            auth_type = auth_config.get("auth_type")
+
+            if auth_type == "oauth":
+                cmd.extend(["--auth_type", "oauth"])
+
+                # Add OAuth environment variables as command line arguments
+                cmd.extend(["--env", "ENABLE_OAUTH", "True"])
+
+                # Map auth config to environment variables for OAuth
+                oauth_env_mapping = {
+                    "oauth_host": "OAUTH_HOST",
+                    "oauth_port": "OAUTH_PORT",
+                    "oauth_server_url": "OAUTH_SERVER_URL",
+                    "oauth_callback_path": "OAUTH_CALLBACK_PATH",
+                    "oauth_client_id": "OAUTH_CLIENT_ID",
+                    "oauth_client_secret": "OAUTH_CLIENT_SECRET",
+                    "oauth_auth_url": "OAUTH_AUTH_URL",
+                    "oauth_token_url": "OAUTH_TOKEN_URL",
+                    "oauth_mcp_scope": "OAUTH_MCP_SCOPE",
+                    "oauth_provider_scope": "OAUTH_PROVIDER_SCOPE",
+                }
+
+                # Add environment variables as command line arguments
+                # Only set non-empty values to avoid Pydantic validation errors
+                for config_key, env_key in oauth_env_mapping.items():
+                    value = auth_config.get(config_key)
+                    if value is not None and str(value).strip():
+                        cmd.extend(["--env", env_key, str(value)])
+
+        # Start the subprocess with both stdout and stderr captured
+        process = subprocess.Popen(cmd, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)  # noqa: ASYNC220, S603
+
+        # Monitor the process startup with multiple checks
+        process_running = False
+        port_bound = False
+
+        await logger.adebug(f"Monitoring MCP Composer startup for project {project_id} (PID: {process.pid})")
+
+        for check in range(max_startup_checks):
+            await asyncio.sleep(startup_delay)
+
+            # Check if process is still running
+            poll_result = process.poll()
+
+            startup_error_msg = None
+            if poll_result is not None:
+                # Process terminated, get the error output
+                await logger.aerror(f"MCP Composer process {process.pid} terminated with exit code: {poll_result}")
+                try:
+                    stdout_content, stderr_content = process.communicate(timeout=2)
+                    # Log the full error details for debugging
+                    await logger.aerror(f"MCP Composer startup failed for project {project_id}")
+                    await logger.aerror(f"MCP Composer stdout:\n{stdout_content}")
+                    await logger.aerror(f"MCP Composer stderr:\n{stderr_content}")
+                    safe_cmd = self._obfuscate_command_secrets(cmd)
+                    await logger.aerror(f"Command that failed: {' '.join(safe_cmd)}")
+
+                    # Extract meaningful error message
+                    startup_error_msg = self._extract_error_message(stdout_content, stderr_content, oauth_server_url)
+                    raise MCPComposerStartupError(startup_error_msg, project_id)
+                except subprocess.TimeoutExpired:
+                    process.kill()
+                    await logger.aerror(
+                        f"MCP Composer process {process.pid} terminated unexpectedly for project {project_id}"
+                    )
+                    startup_error_msg = self._extract_error_message("", "", oauth_server_url)
+                    raise MCPComposerStartupError(startup_error_msg, project_id) from None
+
+            # Process is still running, check if port is bound
+            port_bound = not self._is_port_available(port)
+
+            if port_bound:
+                await logger.adebug(
+                    f"MCP Composer for project {project_id} bound to port {port} "
+                    f"(check {check + 1}/{max_startup_checks})"
+                )
+                process_running = True
+                break
+            await logger.adebug(
+                f"MCP Composer for project {project_id} not yet bound to port {port} "
+                f"(check {check + 1}/{max_startup_checks})"
+            )
+
+            # Try to read any available stderr without blocking (only log if there's an error)
+            if process.stderr and select.select([process.stderr], [], [], 0)[0]:
+                try:
+                    stderr_line = process.stderr.readline()
+                    if stderr_line and "ERROR" in stderr_line:
+                        await logger.aerror(f"MCP Composer error: {stderr_line.strip()}")
+                except Exception:  # noqa: BLE001
+                    pass
+
+        # After all checks
+        if not process_running or not port_bound:
+            # Get comprehensive error information
+            poll_result = process.poll()
+
+            if poll_result is not None:
+                # Process died
+                startup_error_msg = None
+                try:
+                    stdout_content, stderr_content = process.communicate(timeout=2)
+                    # Extract meaningful error message
+                    startup_error_msg = self._extract_error_message(stdout_content, stderr_content, oauth_server_url)
+                    await logger.aerror(f"MCP Composer startup failed for project {project_id}:")
+                    await logger.aerror(f" - Process died with exit code: {poll_result}")
+                    await logger.aerror(f" - Target: {host}:{port}")
+                    # Obfuscate secrets in command before logging
+                    safe_cmd = self._obfuscate_command_secrets(cmd)
+                    await logger.aerror(f" - Command: {' '.join(safe_cmd)}")
+                    if stderr_content.strip():
+                        await logger.aerror(f" - Error output: {stderr_content.strip()}")
+                    if stdout_content.strip():
+                        await logger.aerror(f" - Standard output: {stdout_content.strip()}")
+                    await logger.aerror(f" - Error message: {startup_error_msg}")
+                except subprocess.TimeoutExpired:
+                    await logger.aerror(f"MCP Composer for project {project_id} died but couldn't read output")
+                    process.kill()
+
+                raise MCPComposerStartupError(startup_error_msg, project_id)
+            # Process running but port not bound
+            await logger.aerror(f"MCP Composer startup failed for project {project_id}:")
+            await logger.aerror(f" - Process is running (PID: {process.pid}) but failed to bind to port {port}")
+            await logger.aerror(
+                f" - Checked {max_startup_checks} times over {max_startup_checks * startup_delay} seconds"
+            )
+            await logger.aerror(f" - Target: {host}:{port}")
+
+            # Get any available output before terminating
+            startup_error_msg = None
+            try:
+                process.terminate()
+                stdout_content, stderr_content = process.communicate(timeout=2)
+                startup_error_msg = self._extract_error_message(stdout_content, stderr_content, oauth_server_url)
+                if stderr_content.strip():
+                    await logger.aerror(f" - Process stderr: {stderr_content.strip()}")
+                if stdout_content.strip():
+                    await logger.aerror(f" - Process stdout: {stdout_content.strip()}")
+            except Exception:  # noqa: BLE001
+                process.kill()
+                await logger.aerror(" - Could not retrieve process output before termination")
+
+            raise MCPComposerStartupError(startup_error_msg, project_id)
+
+        # Close the pipes if everything is successful
+        if process.stdout:
+            process.stdout.close()
+        if process.stderr:
+            process.stderr.close()
+
+        return process
+
+    @require_composer_enabled
+    def get_project_composer_port(self, project_id: str) -> int | None:
+        """Get the port number for a specific project's composer."""
+        if project_id not in self.project_composers:
+            return None
+        return self.project_composers[project_id]["port"]
+
+    @require_composer_enabled
+    async def teardown(self) -> None:
+        """Clean up resources when the service is torn down."""
+        await logger.adebug("Tearing down MCP Composer service...")
+        await self.stop()
+        await logger.adebug("MCP Composer service teardown complete")
lfx/services/schema.py CHANGED
lfx/services/settings/auth.py CHANGED
@@ -28,17 +28,17 @@ class AuthSettings(BaseSettings):
     API_V1_STR: str = "/api/v1"

     AUTO_LOGIN: bool = Field(
-        default=True,  # TODO: Set to False in
+        default=True,  # TODO: Set to False in v2.0
         description=(
             "Enable automatic login with default credentials. "
             "SECURITY WARNING: This bypasses authentication and should only be used in development environments. "
-            "Set to False in production."
+            "Set to False in production. This will default to False in v2.0."
         ),
     )
     """If True, the application will attempt to log in automatically as a super user."""
-    skip_auth_auto_login: bool =
+    skip_auth_auto_login: bool = False
     """If True, the application will skip authentication when AUTO_LOGIN is enabled.
-    This will be removed in
+    This will be removed in v2.0"""

     WEBHOOK_AUTH_ENABLE: bool = False
     """If True, webhook endpoints will require API key authentication.
@@ -52,7 +52,8 @@ class AuthSettings(BaseSettings):

     NEW_USER_IS_ACTIVE: bool = False
     SUPERUSER: str = DEFAULT_SUPERUSER
-
+    # Store password as SecretStr to prevent accidental plaintext exposure
+    SUPERUSER_PASSWORD: SecretStr = Field(default=DEFAULT_SUPERUSER_PASSWORD)

     REFRESH_SAME_SITE: Literal["lax", "strict", "none"] = "none"
     """The SameSite attribute of the refresh token cookie."""
@@ -75,8 +76,8 @@ class AuthSettings(BaseSettings):
     model_config = SettingsConfigDict(validate_assignment=True, extra="ignore", env_prefix="LANGFLOW_")

     def reset_credentials(self) -> None:
-
-        self.SUPERUSER_PASSWORD =
+        # Preserve the configured username but scrub the password from memory to avoid plaintext exposure.
+        self.SUPERUSER_PASSWORD = SecretStr("")

         # If autologin is true, then we need to set the credentials to
         # the default values
@@ -85,15 +86,17 @@ class AuthSettings(BaseSettings):
     @field_validator("SUPERUSER", "SUPERUSER_PASSWORD", mode="before")
     @classmethod
     def validate_superuser(cls, value, info):
+        # When AUTO_LOGIN is enabled, force superuser to use default values.
         if info.data.get("AUTO_LOGIN"):
-
-
-
-
-
-
-
-
+            logger.debug("Auto login is enabled, forcing superuser to use default values")
+            if info.field_name == "SUPERUSER":
+                if value != DEFAULT_SUPERUSER:
+                    logger.debug("Resetting superuser to default value")
+                    return DEFAULT_SUPERUSER
+            if info.field_name == "SUPERUSER_PASSWORD":
+                if value != DEFAULT_SUPERUSER_PASSWORD.get_secret_value():
+                    logger.debug("Resetting superuser password to default value")
+                    return DEFAULT_SUPERUSER_PASSWORD

         return value

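The SUPERUSER_PASSWORD change in the hunk above relies on Pydantic's SecretStr, which masks the value in str() and repr() output (and therefore in most logs) and only exposes it through an explicit get_secret_value() call; that is what the "prevent accidental plaintext exposure" comment refers to. A minimal, package-independent illustration:

from pydantic import SecretStr

password = SecretStr("s3cr3t")
print(password)                     # **********
print(repr(password))               # SecretStr('**********')
print(password.get_secret_value())  # s3cr3t  (reading the plaintext is an explicit opt-in)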