lfx-nightly 0.1.12.dev33__py3-none-any.whl → 0.1.12.dev35__py3-none-any.whl

This diff shows the contents of two publicly released versions of lfx-nightly as they appear in their public registry. It is provided for informational purposes only and reflects the changes between the two package versions.

lfx/base/agents/events.py CHANGED
@@ -80,7 +80,7 @@ async def handle_on_chain_start(
         header={"title": "Input", "icon": "MessageSquare"},
     )
     agent_message.content_blocks[0].contents.append(text_content)
-    agent_message = await send_message_method(message=agent_message)
+    agent_message = await send_message_method(message=agent_message, skip_db_update=True)
     start_time = perf_counter()
     return agent_message, start_time

@@ -151,7 +151,7 @@ async def handle_on_chain_end(
         header={"title": "Output", "icon": "MessageSquare"},
     )
     agent_message.content_blocks[0].contents.append(text_content)
-    agent_message = await send_message_method(message=agent_message)
+    agent_message = await send_message_method(message=agent_message, skip_db_update=True)
     start_time = perf_counter()
     return agent_message, start_time

@@ -190,7 +190,7 @@ async def handle_on_tool_start(
     tool_blocks_map[tool_key] = tool_content
     agent_message.content_blocks[0].contents.append(tool_content)

-    agent_message = await send_message_method(message=agent_message)
+    agent_message = await send_message_method(message=agent_message, skip_db_update=True)
     if agent_message.content_blocks and agent_message.content_blocks[0].contents:
         tool_blocks_map[tool_key] = agent_message.content_blocks[0].contents[-1]
     return agent_message, new_start_time
@@ -210,7 +210,7 @@ async def handle_on_tool_end(

    if tool_content and isinstance(tool_content, ToolContent):
        # Call send_message_method first to get the updated message structure
-        agent_message = await send_message_method(message=agent_message)
+        agent_message = await send_message_method(message=agent_message, skip_db_update=True)
        new_start_time = perf_counter()

        # Now find and update the tool content in the current message
@@ -258,7 +258,7 @@ async def handle_on_tool_error(
    tool_content.error = event["data"].get("error", "Unknown error")
    tool_content.duration = _calculate_duration(start_time)
    tool_content.header = {"title": f"Error using **{tool_content.name}**", "icon": "Hammer"}
-    agent_message = await send_message_method(message=agent_message)
+    agent_message = await send_message_method(message=agent_message, skip_db_update=True)
    start_time = perf_counter()
    return agent_message, start_time

@@ -275,14 +275,14 @@ async def handle_on_chain_stream(
    if output and isinstance(output, str | list):
        agent_message.text = _extract_output_text(output)
        agent_message.properties.state = "complete"
-        agent_message = await send_message_method(message=agent_message)
+        agent_message = await send_message_method(message=agent_message, skip_db_update=True)
        start_time = perf_counter()
    elif isinstance(data_chunk, AIMessageChunk):
        output_text = _extract_output_text(data_chunk.content)
        if output_text and isinstance(agent_message.text, str):
            agent_message.text += output_text
        agent_message.properties.state = "partial"
-        agent_message = await send_message_method(message=agent_message)
+        agent_message = await send_message_method(message=agent_message, skip_db_update=True)
    if not agent_message.text:
        start_time = perf_counter()
    return agent_message, start_time
@@ -346,13 +346,17 @@ async def process_agent_events(
        async for event in agent_executor:
            if event["event"] in TOOL_EVENT_HANDLERS:
                tool_handler = TOOL_EVENT_HANDLERS[event["event"]]
+                # Use skip_db_update=True during streaming to avoid DB round-trips
                agent_message, start_time = await tool_handler(
                    event, agent_message, tool_blocks_map, send_message_method, start_time
                )
            elif event["event"] in CHAIN_EVENT_HANDLERS:
                chain_handler = CHAIN_EVENT_HANDLERS[event["event"]]
+                # Use skip_db_update=True during streaming to avoid DB round-trips
                agent_message, start_time = await chain_handler(event, agent_message, send_message_method, start_time)
        agent_message.properties.state = "complete"
+        # Final DB update with the complete message (skip_db_update=False by default)
+        agent_message = await send_message_method(message=agent_message)
    except Exception as e:
        raise ExceptionWithMessageError(agent_message, str(e)) from e
    return await Message.create(**agent_message.model_dump())
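The pattern in this file: every streaming event handler now passes skip_db_update=True, so intermediate updates only go out over the event stream, and a single send_message_method call after the event loop persists the finished message once. A minimal, self-contained sketch of that contract, assuming a send_message_method that accepts a skip_db_update keyword; stream_to_client and persist_message below are hypothetical stand-ins for what Langflow actually does internally:

# Hedged sketch only: stream_to_client and persist_message are hypothetical
# stand-ins for the transport and DB layers behind the real send_message_method.
import asyncio


async def stream_to_client(message: dict) -> None:
    print(f"stream:  {message['text']!r} ({message['state']})")  # cheap, per chunk


async def persist_message(message: dict) -> None:
    print(f"persist: {message['text']!r}")  # expensive, once per message


async def send_message_method(*, message: dict, skip_db_update: bool = False) -> dict:
    await stream_to_client(message)
    if not skip_db_update:
        await persist_message(message)
    return message


async def main() -> None:
    message = {"text": "", "state": "partial"}
    for chunk in ("Hel", "lo"):
        message["text"] += chunk
        # Streaming path: skip the DB round-trip for every chunk.
        message = await send_message_method(message=message, skip_db_update=True)
    message["state"] = "complete"
    # Final call uses the default skip_db_update=False and writes to the DB once.
    message = await send_message_method(message=message)


asyncio.run(main())

Collapsing the per-chunk writes into one final write trades durability of intermediate states for far fewer database round-trips per agent run.
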
lfx/base/mcp/util.py CHANGED
@@ -28,8 +28,12 @@ HTTP_ERROR_STATUS_CODE = httpx_codes.BAD_REQUEST # HTTP status code for client

 # HTTP status codes used in validation
 HTTP_NOT_FOUND = 404
+HTTP_METHOD_NOT_ALLOWED = 405
+HTTP_NOT_ACCEPTABLE = 406
 HTTP_BAD_REQUEST = 400
 HTTP_INTERNAL_SERVER_ERROR = 500
+HTTP_UNAUTHORIZED = 401
+HTTP_FORBIDDEN = 403

 # MCP Session Manager constants
 settings = get_settings_service().settings
@@ -378,8 +382,8 @@ def _validate_node_installation(command: str) -> str:

 async def _validate_connection_params(mode: str, command: str | None = None, url: str | None = None) -> None:
     """Validate connection parameters based on mode."""
-    if mode not in ["Stdio", "SSE"]:
-        msg = f"Invalid mode: {mode}. Must be either 'Stdio' or 'SSE'"
+    if mode not in ["Stdio", "Streamable_HTTP", "SSE"]:
+        msg = f"Invalid mode: {mode}. Must be either 'Stdio', 'Streamable_HTTP', or 'SSE'"
         raise ValueError(msg)

     if mode == "Stdio" and not command:
@@ -387,8 +391,8 @@ async def _validate_connection_params(mode: str, command: str | None = None, url
         raise ValueError(msg)
     if mode == "Stdio" and command:
         _validate_node_installation(command)
-    if mode == "SSE" and not url:
-        msg = "URL is required for SSE mode"
+    if mode in ["Streamable_HTTP", "SSE"] and not url:
+        msg = f"URL is required for {mode} mode"
         raise ValueError(msg)


@@ -400,6 +404,7 @@ class MCPSessionManager:
     2. Maximum session limits per server to prevent resource exhaustion
     3. Idle timeout for automatic session cleanup
     4. Periodic cleanup of stale sessions
+    5. Transport preference caching to avoid retrying failed transports
     """

     def __init__(self):
@@ -410,6 +415,9 @@ class MCPSessionManager:
         self._context_to_session: dict[str, tuple[str, str]] = {}
         # Reference count for each active (server_key, session_id)
         self._session_refcount: dict[tuple[str, str], int] = {}
+        # Cache which transport works for each server to avoid retrying failed transports
+        # server_key -> "streamable_http" | "sse"
+        self._transport_preference: dict[str, str] = {}
         self._cleanup_task = None
         self._start_cleanup_task()

@@ -467,15 +475,16 @@ class MCPSessionManager:
             env_str = str(sorted((connection_params.env or {}).items()))
             key_input = f"{command_str}|{env_str}"
             return f"stdio_{hash(key_input)}"
-        elif transport_type == "sse" and (isinstance(connection_params, dict) and "url" in connection_params):
+        elif transport_type == "streamable_http" and (
+            isinstance(connection_params, dict) and "url" in connection_params
+        ):
             # Include URL and headers for uniqueness
             url = connection_params["url"]
             headers = str(sorted((connection_params.get("headers", {})).items()))
             key_input = f"{url}|{headers}"
-            return f"sse_{hash(key_input)}"
+            return f"streamable_http_{hash(key_input)}"

         # Fallback to a generic key
-        # TODO: add option for streamable HTTP in future.
         return f"{transport_type}_{hash(str(connection_params))}"

     async def _validate_session_connectivity(self, session) -> bool:
@@ -525,7 +534,7 @@ class MCPSessionManager:
         """Get or create a session with improved reuse strategy.

         The key insight is that we should reuse sessions based on the server
-        identity (command + args for stdio, URL for SSE) rather than the context_id.
+        identity (command + args for stdio, URL for Streamable HTTP) rather than the context_id.
         This prevents creating a new subprocess for each unique context.
         """
         server_key = self._get_server_key(connection_params, transport_type)
@@ -578,17 +587,24 @@ class MCPSessionManager:

         if transport_type == "stdio":
             session, task = await self._create_stdio_session(session_id, connection_params)
-        elif transport_type == "sse":
-            session, task = await self._create_sse_session(session_id, connection_params)
+            actual_transport = "stdio"
+        elif transport_type == "streamable_http":
+            # Pass the cached transport preference if available
+            preferred_transport = self._transport_preference.get(server_key)
+            session, task, actual_transport = await self._create_streamable_http_session(
+                session_id, connection_params, preferred_transport
+            )
+            # Cache the transport that worked for future connections
+            self._transport_preference[server_key] = actual_transport
         else:
             msg = f"Unknown transport type: {transport_type}"
             raise ValueError(msg)

-        # Store session info
+        # Store session info with the actual transport used
         sessions[session_id] = {
             "session": session,
             "task": task,
-            "type": transport_type,
+            "type": actual_transport,
             "last_used": asyncio.get_event_loop().time(),
         }

@@ -634,9 +650,9 @@ class MCPSessionManager:
         self._background_tasks.add(task)
         task.add_done_callback(self._background_tasks.discard)

-        # Wait for session to be ready
+        # Wait for session to be ready (use longer timeout for remote connections)
         try:
-            session = await asyncio.wait_for(session_future, timeout=10.0)
+            session = await asyncio.wait_for(session_future, timeout=30.0)
         except asyncio.TimeoutError as timeout_err:
             # Clean up the failed task
             if not task.done():
@@ -652,50 +668,136 @@ class MCPSessionManager:

         return session, task

-    async def _create_sse_session(self, session_id: str, connection_params):
-        """Create a new SSE session as a background task to avoid context issues."""
+    async def _create_streamable_http_session(
+        self, session_id: str, connection_params, preferred_transport: str | None = None
+    ):
+        """Create a new Streamable HTTP session with SSE fallback as a background task to avoid context issues.
+
+        Args:
+            session_id: Unique identifier for this session
+            connection_params: Connection parameters including URL, headers, timeouts
+            preferred_transport: If set to "sse", skip Streamable HTTP and go directly to SSE
+
+        Returns:
+            tuple: (session, task, transport_used) where transport_used is "streamable_http" or "sse"
+        """
         import asyncio

         from mcp.client.sse import sse_client
+        from mcp.client.streamable_http import streamablehttp_client

         # Create a future to get the session
         session_future: asyncio.Future[ClientSession] = asyncio.Future()
+        # Track which transport succeeded
+        used_transport: list[str] = []

         async def session_task():
             """Background task that keeps the session alive."""
-            try:
-                async with sse_client(
-                    connection_params["url"],
-                    connection_params["headers"],
-                    connection_params["timeout_seconds"],
-                    connection_params["sse_read_timeout_seconds"],
-                ) as (read, write):
-                    session = ClientSession(read, write)
-                    async with session:
-                        await session.initialize()
-                        # Signal that session is ready
-                        session_future.set_result(session)
-
-                        # Keep the session alive until cancelled
-                        import anyio
-
-                        event = anyio.Event()
-                        try:
-                            await event.wait()
-                        except asyncio.CancelledError:
-                            await logger.ainfo(f"Session {session_id} is shutting down")
-            except Exception as e:  # noqa: BLE001
-                if not session_future.done():
-                    session_future.set_exception(e)
+            streamable_error = None
+
+            # Skip Streamable HTTP if we know SSE works for this server
+            if preferred_transport != "sse":
+                # Try Streamable HTTP first with a quick timeout
+                try:
+                    await logger.adebug(f"Attempting Streamable HTTP connection for session {session_id}")
+                    # Use a shorter timeout for the initial connection attempt (2 seconds)
+                    async with streamablehttp_client(
+                        url=connection_params["url"],
+                        headers=connection_params["headers"],
+                        timeout=connection_params["timeout_seconds"],
+                    ) as (read, write, _):
+                        session = ClientSession(read, write)
+                        async with session:
+                            # Initialize with a timeout to fail fast
+                            await asyncio.wait_for(session.initialize(), timeout=2.0)
+                            used_transport.append("streamable_http")
+                            await logger.ainfo(f"Session {session_id} connected via Streamable HTTP")
+                            # Signal that session is ready
+                            session_future.set_result(session)
+
+                            # Keep the session alive until cancelled
+                            import anyio
+
+                            event = anyio.Event()
+                            try:
+                                await event.wait()
+                            except asyncio.CancelledError:
+                                await logger.ainfo(f"Session {session_id} (Streamable HTTP) is shutting down")
+                except (asyncio.TimeoutError, Exception) as e:  # noqa: BLE001
+                    # If Streamable HTTP fails or times out, try SSE as fallback immediately
+                    streamable_error = e
+                    error_type = "timed out" if isinstance(e, asyncio.TimeoutError) else "failed"
+                    await logger.awarning(
+                        f"Streamable HTTP {error_type} for session {session_id}: {e}. Falling back to SSE..."
+                    )
+            else:
+                await logger.adebug(f"Skipping Streamable HTTP for session {session_id}, using cached SSE preference")
+
+            # Try SSE if Streamable HTTP failed or if SSE is preferred
+            if streamable_error is not None or preferred_transport == "sse":
+                try:
+                    await logger.adebug(f"Attempting SSE connection for session {session_id}")
+                    # Extract SSE read timeout from connection params, default to 30s if not present
+                    sse_read_timeout = connection_params.get("sse_read_timeout_seconds", 30)
+
+                    async with sse_client(
+                        connection_params["url"],
+                        connection_params["headers"],
+                        connection_params["timeout_seconds"],
+                        sse_read_timeout,
+                    ) as (read, write):
+                        session = ClientSession(read, write)
+                        async with session:
+                            await session.initialize()
+                            used_transport.append("sse")
+                            fallback_msg = " (fallback)" if streamable_error else " (preferred)"
+                            await logger.ainfo(f"Session {session_id} connected via SSE{fallback_msg}")
+                            # Signal that session is ready
+                            if not session_future.done():
+                                session_future.set_result(session)
+
+                            # Keep the session alive until cancelled
+                            import anyio
+
+                            event = anyio.Event()
+                            try:
+                                await event.wait()
+                            except asyncio.CancelledError:
+                                await logger.ainfo(f"Session {session_id} (SSE) is shutting down")
+                except Exception as sse_error:  # noqa: BLE001
+                    # Both transports failed (or just SSE if it was preferred)
+                    if streamable_error:
+                        await logger.aerror(
+                            f"Both Streamable HTTP and SSE failed for session {session_id}. "
+                            f"Streamable HTTP error: {streamable_error}. SSE error: {sse_error}"
+                        )
+                        if not session_future.done():
+                            session_future.set_exception(
+                                ValueError(
+                                    f"Failed to connect via Streamable HTTP ({streamable_error}) or SSE ({sse_error})"
+                                )
+                            )
+                    else:
+                        await logger.aerror(f"SSE connection failed for session {session_id}: {sse_error}")
+                        if not session_future.done():
+                            session_future.set_exception(ValueError(f"Failed to connect via SSE: {sse_error}"))

         # Start the background task
         task = asyncio.create_task(session_task())
         self._background_tasks.add(task)
         task.add_done_callback(self._background_tasks.discard)

-        # Wait for session to be ready
+        # Wait for session to be ready (use longer timeout for remote connections)
         try:
-            session = await asyncio.wait_for(session_future, timeout=10.0)
+            session = await asyncio.wait_for(session_future, timeout=30.0)
+            # Log which transport was used
+            if used_transport:
+                transport_used = used_transport[0]
+                await logger.ainfo(f"Session {session_id} successfully established using {transport_used}")
+                return session, task, transport_used
+            # This shouldn't happen, but handle it just in case
+            msg = f"Session {session_id} established but transport not recorded"
+            raise ValueError(msg)
         except asyncio.TimeoutError as timeout_err:
             # Clean up the failed task
             if not task.done():
@@ -705,12 +807,10 @@ class MCPSessionManager:
                 with contextlib.suppress(asyncio.CancelledError):
                     await task
             self._background_tasks.discard(task)
-            msg = f"Timeout waiting for SSE session {session_id} to initialize"
+            msg = f"Timeout waiting for Streamable HTTP/SSE session {session_id} to initialize"
             await logger.aerror(msg)
             raise ValueError(msg) from timeout_err

-        return session, task
-
     async def _cleanup_session_by_id(self, server_key: str, session_id: str):
         """Clean up a specific session by server key and session ID."""
         if server_key not in self.sessions_by_server:
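Together with the dispatch change in the session manager above, this rewrite implements a simple strategy: try Streamable HTTP first, fall back to SSE, and remember per server which transport worked so later connections skip the transport that failed. A condensed, self-contained sketch of that idea, where connect() is a hypothetical stand-in for the real MCP transport clients:

# Condensed sketch of the fallback-with-preference-cache strategy.
# connect() is a hypothetical stand-in for the real Streamable HTTP / SSE clients.
import asyncio

transport_preference: dict[str, str] = {}  # server_key -> transport that worked last time


async def connect(transport: str, url: str) -> str:
    # Pretend that SSE-only servers reject Streamable HTTP.
    if transport == "streamable_http" and url.endswith("/sse"):
        msg = "405 Method Not Allowed"
        raise ConnectionError(msg)
    return f"{transport} session for {url}"


async def connect_with_fallback(server_key: str, url: str) -> str:
    preferred = transport_preference.get(server_key)
    order = ["sse"] if preferred == "sse" else ["streamable_http", "sse"]
    last_error: Exception | None = None
    for transport in order:
        try:
            session = await connect(transport, url)
        except ConnectionError as exc:
            last_error = exc
            continue
        transport_preference[server_key] = transport  # cache the winner
        return session
    msg = f"All transports failed: {last_error}"
    raise ConnectionError(msg)


print(asyncio.run(connect_with_fallback("srv", "http://localhost:8000/sse")))  # falls back to SSE
print(transport_preference)  # {'srv': 'sse'}; the next call goes straight to SSE

The real implementation does the same thing but keeps the winning transport alive in a background task and returns which transport was used, so the session cache records the actual transport rather than the requested one.
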
@@ -1056,7 +1156,7 @@ class MCPStdioClient:
         await self.disconnect()


-class MCPSseClient:
+class MCPStreamableHttpClient:
     def __init__(self, component_cache=None):
         self.session: ClientSession | None = None
         self._connection_params = None
@@ -1080,67 +1180,15 @@ class MCPSseClient:
             self._component_cache.set("mcp_session_manager", session_manager)
         return session_manager

-    async def validate_url(self, url: str | None, headers: dict[str, str] | None = None) -> tuple[bool, str]:
-        """Validate the SSE URL before attempting connection."""
+    async def validate_url(self, url: str | None) -> tuple[bool, str]:
+        """Validate the Streamable HTTP URL before attempting connection."""
         try:
             parsed = urlparse(url)
             if not parsed.scheme or not parsed.netloc:
                 return False, "Invalid URL format. Must include scheme (http/https) and host."
-
-            async with httpx.AsyncClient() as client:
-                try:
-                    # For SSE endpoints, try a GET request with short timeout
-                    # Many SSE servers don't support HEAD requests and return 404
-                    response = await client.get(
-                        url, timeout=2.0, headers={"Accept": "text/event-stream", **(headers or {})}
-                    )
-
-                    # For SSE, we expect the server to either:
-                    # 1. Start streaming (200)
-                    # 2. Return 404 if HEAD/GET without proper SSE handshake is not supported
-                    # 3. Return other status codes that we should handle gracefully
-
-                    # Don't fail on 404 since many SSE endpoints return this for non-SSE requests
-                    if response.status_code == HTTP_NOT_FOUND:
-                        # This is likely an SSE endpoint that doesn't support regular GET
-                        # Let the actual SSE connection attempt handle this
-                        return True, ""
-
-                    # Fail on client errors except 404, but allow server errors and redirects
-                    if (
-                        HTTP_BAD_REQUEST <= response.status_code < HTTP_INTERNAL_SERVER_ERROR
-                        and response.status_code != HTTP_NOT_FOUND
-                    ):
-                        return False, f"Server returned client error status: {response.status_code}"
-
-                except httpx.TimeoutException:
-                    # Timeout on a short request might indicate the server is trying to stream
-                    # This is actually expected behavior for SSE endpoints
-                    return True, ""
-                except httpx.NetworkError:
-                    return False, "Network error. Could not reach the server."
-                else:
-                    return True, ""
-
-        except (httpx.HTTPError, ValueError, OSError) as e:
+        except (ValueError, OSError) as e:
             return False, f"URL validation error: {e!s}"
-
-    async def pre_check_redirect(self, url: str | None, headers: dict[str, str] | None = None) -> str | None:
-        """Check for redirects and return the final URL."""
-        if url is None:
-            return url
-        try:
-            async with httpx.AsyncClient(follow_redirects=False) as client:
-                # Use GET with SSE headers instead of HEAD since many SSE servers don't support HEAD
-                response = await client.get(
-                    url, timeout=2.0, headers={"Accept": "text/event-stream", **(headers or {})}
-                )
-                if response.status_code == httpx.codes.TEMPORARY_REDIRECT:
-                    return response.headers.get("Location", url)
-                # Don't treat 404 as an error here - let the main connection handle it
-        except (httpx.RequestError, httpx.HTTPError) as e:
-            await logger.awarning(f"Error checking redirects: {e}")
-        return url
+        return True, ""

     async def _connect_to_server(
         self,
@@ -1149,27 +1197,31 @@ class MCPSseClient:
         timeout_seconds: int = 30,
         sse_read_timeout_seconds: int = 30,
     ) -> list[StructuredTool]:
-        """Connect to MCP server using SSE transport (SDK style)."""
+        """Connect to MCP server using Streamable HTTP transport with SSE fallback (SDK style)."""
         # Validate and sanitize headers early
         validated_headers = _process_headers(headers)

         if url is None:
-            msg = "URL is required for SSE mode"
+            msg = "URL is required for StreamableHTTP or SSE mode"
             raise ValueError(msg)
-        is_valid, error_msg = await self.validate_url(url, validated_headers)
-        if not is_valid:
-            msg = f"Invalid SSE URL ({url}): {error_msg}"
-            raise ValueError(msg)
-
-        url = await self.pre_check_redirect(url, validated_headers)

-        # Store connection parameters for later use in run_tool
-        self._connection_params = {
-            "url": url,
-            "headers": validated_headers,
-            "timeout_seconds": timeout_seconds,
-            "sse_read_timeout_seconds": sse_read_timeout_seconds,
-        }
+        # Only validate URL if we don't have a cached session
+        # This avoids expensive HTTP validation calls when reusing sessions
+        if not self._connected or not self._connection_params:
+            is_valid, error_msg = await self.validate_url(url)
+            if not is_valid:
+                msg = f"Invalid Streamable HTTP or SSE URL ({url}): {error_msg}"
+                raise ValueError(msg)
+            # Store connection parameters for later use in run_tool
+            # Include SSE read timeout for fallback
+            self._connection_params = {
+                "url": url,
+                "headers": validated_headers,
+                "timeout_seconds": timeout_seconds,
+                "sse_read_timeout_seconds": sse_read_timeout_seconds,
+            }
+        elif headers:
+            self._connection_params["headers"] = validated_headers

         # If no session context is set, create a default one
         if not self._session_context:
@@ -1177,18 +1229,21 @@ class MCPSseClient:
             import uuid

             param_hash = uuid.uuid4().hex[:8]
-            self._session_context = f"default_sse_{param_hash}"
+            self._session_context = f"default_http_{param_hash}"

-        # Get or create a persistent session
+        # Get or create a persistent session (will try Streamable HTTP, then SSE fallback)
         session = await self._get_or_create_session()
         response = await session.list_tools()
         self._connected = True
         return response.tools

-    async def connect_to_server(self, url: str, headers: dict[str, str] | None = None) -> list[StructuredTool]:
-        """Connect to MCP server using SSE transport (SDK style)."""
+    async def connect_to_server(
+        self, url: str, headers: dict[str, str] | None = None, sse_read_timeout_seconds: int = 30
+    ) -> list[StructuredTool]:
+        """Connect to MCP server using Streamable HTTP with SSE fallback transport (SDK style)."""
         return await asyncio.wait_for(
-            self._connect_to_server(url, headers), timeout=get_settings_service().settings.mcp_server_timeout
+            self._connect_to_server(url, headers, sse_read_timeout_seconds=sse_read_timeout_seconds),
+            timeout=get_settings_service().settings.mcp_server_timeout,
         )

     def set_session_context(self, context_id: str):
@@ -1204,12 +1259,14 @@ class MCPSseClient:
         # Use cached session manager to get/create persistent session
         session_manager = self._get_session_manager()
         # Cache session so we can access server-assigned session_id later for DELETE
-        self.session = await session_manager.get_session(self._session_context, self._connection_params, "sse")
+        self.session = await session_manager.get_session(
+            self._session_context, self._connection_params, "streamable_http"
+        )
         return self.session

     async def _terminate_remote_session(self) -> None:
         """Attempt to explicitly terminate the remote MCP session via HTTP DELETE (best-effort)."""
-        # Only relevant for SSE transport
+        # Only relevant for Streamable HTTP or SSE transport
         if not self._connection_params or "url" not in self._connection_params:
             return

@@ -1255,7 +1312,7 @@ class MCPSseClient:
             import uuid

             param_hash = uuid.uuid4().hex[:8]
-            self._session_context = f"default_sse_{param_hash}"
+            self._session_context = f"default_http_{param_hash}"

         max_retries = 2
         last_error_type = None
@@ -1326,7 +1383,7 @@ class MCPSseClient:
                 await logger.aerror(msg)
                 # Clean up failed session from cache
                 if self._session_context and self._component_cache:
-                    cache_key = f"mcp_session_sse_{self._session_context}"
+                    cache_key = f"mcp_session_http_{self._session_context}"
                     self._component_cache.delete(cache_key)
                 self._connected = False
                 raise ValueError(msg) from e
@@ -1364,11 +1421,17 @@ class MCPSseClient:
         await self.disconnect()


+# Backward compatibility: MCPSseClient is now an alias for MCPStreamableHttpClient
+# The new client supports both Streamable HTTP and SSE with automatic fallback
+MCPSseClient = MCPStreamableHttpClient
+
+
 async def update_tools(
     server_name: str,
     server_config: dict,
     mcp_stdio_client: MCPStdioClient | None = None,
-    mcp_sse_client: MCPSseClient | None = None,
+    mcp_streamable_http_client: MCPStreamableHttpClient | None = None,
+    mcp_sse_client: MCPStreamableHttpClient | None = None,  # Backward compatibility
 ) -> tuple[str, list[StructuredTool], dict[str, StructuredTool]]:
     """Fetch server config and update available tools."""
     if server_config is None:
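Because the old name is re-bound to the new class at module level, existing imports, constructor calls, and isinstance checks against MCPSseClient keep working. A tiny illustration of why a plain class alias is enough (the class body here is simplified, not the real one):

# Simplified illustration of the aliasing approach; not the real class body.
class MCPStreamableHttpClient:
    def __init__(self, component_cache=None):
        self.component_cache = component_cache


MCPSseClient = MCPStreamableHttpClient  # old name kept for backward compatibility

client = MCPSseClient()
assert isinstance(client, MCPStreamableHttpClient)
assert MCPSseClient is MCPStreamableHttpClient
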
@@ -1377,11 +1440,17 @@ async def update_tools(
         return "", [], {}
     if mcp_stdio_client is None:
         mcp_stdio_client = MCPStdioClient()
-    if mcp_sse_client is None:
-        mcp_sse_client = MCPSseClient()
+
+    # Backward compatibility: accept mcp_sse_client parameter
+    if mcp_streamable_http_client is None:
+        mcp_streamable_http_client = mcp_sse_client if mcp_sse_client is not None else MCPStreamableHttpClient()

     # Fetch server config from backend
-    mode = "Stdio" if "command" in server_config else "SSE" if "url" in server_config else ""
+    # Determine mode from config, defaulting to Streamable_HTTP if URL present
+    mode = server_config.get("mode", "")
+    if not mode:
+        mode = "Stdio" if "command" in server_config else "Streamable_HTTP" if "url" in server_config else ""
+
     command = server_config.get("command", "")
     url = server_config.get("url", "")
     tools = []
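The resolution order is now: an explicit "mode" key in the server config wins; otherwise "command" selects Stdio and "url" defaults to Streamable_HTTP rather than SSE. A standalone sketch of that logic with made-up example configs:

# The configs below are illustrative examples, not taken from the package.
def resolve_mode(server_config: dict) -> str:
    mode = server_config.get("mode", "")
    if not mode:
        mode = "Stdio" if "command" in server_config else "Streamable_HTTP" if "url" in server_config else ""
    return mode


print(resolve_mode({"command": "uvx", "args": ["some-mcp-server"]}))      # Stdio
print(resolve_mode({"url": "http://localhost:8000/mcp"}))                 # Streamable_HTTP (new default)
print(resolve_mode({"url": "http://localhost:8000/sse", "mode": "SSE"}))  # SSE (explicit override)
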
@@ -1394,7 +1463,7 @@ async def update_tools(
         raise

     # Determine connection type and parameters
-    client: MCPStdioClient | MCPSseClient | None = None
+    client: MCPStdioClient | MCPStreamableHttpClient | None = None
     if mode == "Stdio":
         # Stdio connection
         args = server_config.get("args", [])
@@ -1402,10 +1471,10 @@ async def update_tools(
         full_command = " ".join([command, *args])
         tools = await mcp_stdio_client.connect_to_server(full_command, env)
         client = mcp_stdio_client
-    elif mode == "SSE":
-        # SSE connection
-        tools = await mcp_sse_client.connect_to_server(url, headers=headers)
-        client = mcp_sse_client
+    elif mode in ["Streamable_HTTP", "SSE"]:
+        # Streamable HTTP connection with SSE fallback
+        tools = await mcp_streamable_http_client.connect_to_server(url, headers=headers)
+        client = mcp_streamable_http_client
     else:
         logger.error(f"Invalid MCP server mode for '{server_name}': {mode}")
         return "", [], {}
lfx/base/models/model.py CHANGED
@@ -229,7 +229,7 @@ class LCModelComponent(Component):
                 system_message_added = True
                 runnable = prompt | runnable
             else:
-                messages.append(input_value.to_lc_message())
+                messages.append(input_value.to_lc_message(self.name))
         else:
             messages.append(HumanMessage(content=input_value))

@@ -6,7 +6,7 @@ from langchain_core.tools import BaseTool, ToolException
 from typing_extensions import override

 from lfx.base.flow_processing.utils import build_data_from_result_data, format_flow_output_data
-from lfx.helpers.flow import build_schema_from_inputs, get_arg_names, get_flow_inputs, run_flow
+from lfx.helpers import build_schema_from_inputs, get_arg_names, get_flow_inputs, run_flow
 from lfx.log.logger import logger
 from lfx.utils.async_helpers import run_until_complete

@@ -5,7 +5,7 @@ from lfx.custom.custom_component.component import Component, get_component_toolk
 from lfx.field_typing import Tool
 from lfx.graph.graph.base import Graph
 from lfx.graph.vertex.base import Vertex
-from lfx.helpers.flow import get_flow_inputs
+from lfx.helpers import get_flow_inputs
 from lfx.inputs.inputs import DropdownInput, InputTypes, MessageInput
 from lfx.log.logger import logger
 from lfx.schema.data import Data