nvidia-nat-mcp 1.3.0a20251006__py3-none-any.whl → 1.3.0a20251008__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -13,29 +13,79 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.

+ import asyncio
  import logging
+ from contextlib import asynccontextmanager
+ from dataclasses import dataclass
+ from dataclasses import field
+ from datetime import datetime
  from datetime import timedelta
- from typing import Literal

+ import aiorwlock
  from pydantic import BaseModel
- from pydantic import Field
- from pydantic import HttpUrl
- from pydantic import model_validator

+ from nat.authentication.interfaces import AuthProviderBase
  from nat.builder.builder import Builder
  from nat.builder.function import FunctionGroup
  from nat.cli.register_workflow import register_function_group
- from nat.data_models.component_ref import AuthenticationRef
- from nat.data_models.function import FunctionGroupBaseConfig
- from nat.plugins.mcp.tool import mcp_tool_function
+ from nat.plugins.mcp.client_base import MCPBaseClient
+ from nat.plugins.mcp.client_config import MCPClientConfig
+ from nat.plugins.mcp.client_config import MCPToolOverrideConfig
+ from nat.plugins.mcp.utils import truncate_session_id

  logger = logging.getLogger(__name__)


+ @dataclass
+ class SessionData:
+     """Container for all session-related data."""
+     client: MCPBaseClient
+     last_activity: datetime
+     ref_count: int = 0
+     lock: asyncio.Lock = field(default_factory=asyncio.Lock)
+
+     # lifetime task to respect task boundaries
+     stop_event: asyncio.Event = field(default_factory=asyncio.Event)
+     lifetime_task: asyncio.Task | None = None
+
+
  class MCPFunctionGroup(FunctionGroup):
      """
      A specialized FunctionGroup for MCP clients that includes MCP-specific attributes
-     with proper type safety.
+     with session management.
+
+     Locking model (simple + safe; occasional 'temporarily unavailable' is acceptable).
+
+     RW semantics:
+     - Multiple readers may hold the reader lock concurrently.
+     - While any reader holds the lock, writers cannot proceed.
+     - While the writer holds the lock, no new readers can proceed.
+
+     Data:
+     - _sessions: dict[str, SessionData]; SessionData = {client, last_activity, ref_count, lock}.
+
+     Locks:
+     - _session_rwlock (aiorwlock.RWLock)
+       • Reader: very short sections — dict lookups, ref_count ++/--, touch last_activity.
+       • Writer: structural changes — create session entries, enforce limits, remove on cleanup.
+     - SessionData.lock (asyncio.Lock)
+       • Protects per-session ref_count only, taken only while holding RW *reader*.
+       • last_activity: written without session lock (timestamp races acceptable for cleanup heuristic).
+
+     Ordering & awaits:
+     - Always acquire RWLock (reader/writer) before SessionData.lock; never the reverse.
+     - Never await network I/O under the writer (client creation is the one intentional exception).
+     - Client close happens after releasing the writer.
+
+     Cleanup:
+     - Under writer: find inactive (ref_count == 0 and idle > max_age), pop from _sessions, stash clients.
+     - After writer: await client.__aexit__() for each stashed client.
+     - TOCTOU race: cleanup may read ref_count==0 then a usage increments it; accepted, yields None gracefully.
+
+     Invariants:
+     - ref_count > 0 prevents cleanup.
+     - Usage context increments ref_count before yielding and decrements on exit.
+     - If a session disappears between ensure/use, callers return "Tool temporarily unavailable".
      """

      def __init__(self, *args, **kwargs):
@@ -45,6 +95,23 @@ class MCPFunctionGroup(FunctionGroup):
          self._mcp_client_server_name: str | None = None
          self._mcp_client_transport: str | None = None

+         # Session management - consolidated data structure
+         self._sessions: dict[str, SessionData] = {}
+
+         # Use RWLock for better concurrency: multiple readers (tool calls) can access
+         # existing sessions simultaneously, while writers (create/delete) get exclusive access
+         self._session_rwlock = aiorwlock.RWLock()
+         # Throttled cleanup control
+         self._last_cleanup_check: datetime = datetime.now()
+         self._cleanup_check_interval: timedelta = timedelta(minutes=5)
+
+         # Shared components for session client creation
+         self._shared_auth_provider: AuthProviderBase | None = None
+         self._client_config: MCPClientConfig | None = None
+
+         # Use random session id for testing only
+         self._use_random_session_id_for_testing: bool = False
+
      @property
      def mcp_client(self):
          """Get the MCP client instance."""
@@ -75,109 +142,339 @@ class MCPFunctionGroup(FunctionGroup):
          """Set the MCP client transport type."""
          self._mcp_client_transport = transport

+     @property
+     def session_count(self) -> int:
+         """Current number of active sessions."""
+         return len(self._sessions)

- class MCPToolOverrideConfig(BaseModel):
-     """
-     Configuration for overriding tool properties when exposing from MCP server.
-     """
-     alias: str | None = Field(default=None, description="Override the tool name (function name in the workflow)")
-     description: str | None = Field(default=None, description="Override the tool description")
-
-
- class MCPServerConfig(BaseModel):
-     """
-     Server connection details for MCP client.
-     Supports stdio, sse, and streamable-http transports.
-     streamable-http is the recommended default for HTTP-based connections.
-     """
-     transport: Literal["stdio", "sse", "streamable-http"] = Field(
-         ..., description="Transport type to connect to the MCP server (stdio, sse, or streamable-http)")
-     url: HttpUrl | None = Field(default=None,
-                                 description="URL of the MCP server (for sse or streamable-http transport)")
-     command: str | None = Field(default=None,
-                                 description="Command to run for stdio transport (e.g. 'python' or 'docker')")
-     args: list[str] | None = Field(default=None, description="Arguments for the stdio command")
-     env: dict[str, str] | None = Field(default=None, description="Environment variables for the stdio process")
-
-     # Authentication configuration
-     auth_provider: str | AuthenticationRef | None = Field(default=None,
-                                                           description="Reference to authentication provider")
-
-     @model_validator(mode="after")
-     def validate_model(self):
-         """Validate that stdio and SSE/Streamable HTTP properties are mutually exclusive."""
-         if self.transport == "stdio":
-             if self.url is not None:
-                 raise ValueError("url should not be set when using stdio transport")
-             if not self.command:
-                 raise ValueError("command is required when using stdio transport")
-             # Auth is not supported for stdio transport
-             if self.auth_provider is not None:
-                 raise ValueError("Authentication is not supported for stdio transport")
-         elif self.transport == "sse":
-             if self.command is not None or self.args is not None or self.env is not None:
-                 raise ValueError("command, args, and env should not be set when using sse transport")
-             if not self.url:
-                 raise ValueError("url is required when using sse transport")
-             # Auth is not supported for SSE transport
-             if self.auth_provider is not None:
-                 raise ValueError("Authentication is not supported for SSE transport.")
-         elif self.transport == "streamable-http":
-             if self.command is not None or self.args is not None or self.env is not None:
-                 raise ValueError("command, args, and env should not be set when using streamable-http transport")
-             if not self.url:
-                 raise ValueError("url is required when using streamable-http transport")
-
-         return self
-
-
- class MCPClientConfig(FunctionGroupBaseConfig, name="mcp_client"):
-     """
-     Configuration for connecting to an MCP server as a client and exposing selected tools.
+     @property
+     def session_limit(self) -> int:
+         """Maximum allowed sessions."""
+         return self._client_config.max_sessions if self._client_config else 100
+
+     def _get_random_session_id(self) -> str:
+         """Get a random session ID."""
+         import uuid
+         return str(uuid.uuid4())
+
+     def _get_session_id_from_context(self) -> str | None:
+         """Get the session ID from the current context."""
+         try:
+             from nat.builder.context import Context as _Ctx
+
+             # Get session id from context, authentication is done per-websocket session for tool calls
+             session_id = None
+             # get session id from cookies if session_aware_tools is enabled
+             if self._client_config and self._client_config.session_aware_tools:
+                 cookies = getattr(_Ctx.get().metadata, "cookies", None)
+                 if cookies:
+                     if self._use_random_session_id_for_testing:
+                         # This path is for testing only and should not be used in production
+                         session_id = self._get_random_session_id()
+                     else:
+                         session_id = cookies.get("nat-session")
+
+             if not session_id:
+                 # use default user id if allowed
+                 if self._shared_auth_provider and \
+                         self._shared_auth_provider.config.allow_default_user_id_for_tool_calls:
+                     session_id = self._shared_auth_provider.config.default_user_id
+             return session_id
+         except Exception:
+             return None
+
+     async def cleanup_sessions(self, max_age: timedelta | None = None) -> int:
+         """
+         Manually trigger cleanup of inactive sessions.
+
+         Args:
+             max_age: Maximum age for sessions before cleanup. If None, uses configured timeout.
+
+         Returns:
+             Number of sessions cleaned up.
+         """
+         sessions_before = len(self._sessions)
+         await self._cleanup_inactive_sessions(max_age)
+         sessions_after = len(self._sessions)
+         return sessions_before - sessions_after
+
+     async def _cleanup_inactive_sessions(self, max_age: timedelta | None = None):
+         """Remove clients for sessions inactive longer than max_age.
+
+         This method uses the RWLock writer to ensure thread-safe cleanup.
+         """
+         if max_age is None:
+             max_age = self._client_config.session_idle_timeout if self._client_config else timedelta(hours=1)
+
+         to_close: list[tuple[str, SessionData]] = []
+
+         async with self._session_rwlock.writer:
+             current_time = datetime.now()
+             inactive_sessions = []
+
+             for session_id, session_data in self._sessions.items():
+                 # Skip cleanup if session is actively being used
+                 if session_data.ref_count > 0:
+                     continue
+
+                 if current_time - session_data.last_activity > max_age:
+                     inactive_sessions.append(session_id)
+
+             for session_id in inactive_sessions:
+                 try:
+                     logger.info("Cleaning up inactive session client: %s", truncate_session_id(session_id))
+                     session_data = self._sessions[session_id]
+                     # Close the client connection
+                     if session_data:
+                         to_close.append((session_id, session_data))
+                 except Exception as e:
+                     logger.warning("Error cleaning up session client %s: %s", truncate_session_id(session_id), e)
+                 finally:
+                     # Always remove from tracking to prevent leaks, even if close failed
+                     self._sessions.pop(session_id, None)
+                     logger.info("Cleaned up session tracking for: %s", truncate_session_id(session_id))
+                     logger.info(" Total sessions: %d", len(self._sessions))
+
+         # Close sessions outside the writer lock to avoid deadlock
+         for session_id, sdata in to_close:
+             try:
+                 if sdata.stop_event and sdata.lifetime_task:
+                     if not sdata.lifetime_task.done():
+                         # Instead of directly exiting the task, set the stop event
+                         # and wait for the task to exit. This ensures the cancel scope
+                         # is entered and exited in the same task.
+                         sdata.stop_event.set()
+                         await sdata.lifetime_task # __aexit__ runs in that task
+                     else:
+                         logger.debug("Session client %s lifetime task already done", truncate_session_id(session_id))
+                 else:
+                     # add fallback to ensure we clean up the client
+                     logger.warning("Session client %s lifetime task not found, cleaning up client",
+                                    truncate_session_id(session_id))
+                     await sdata.client.__aexit__(None, None, None)
+             except Exception as e:
+                 logger.warning("Error cleaning up session client %s: %s", truncate_session_id(session_id), e)
+
+     async def _get_session_client(self, session_id: str) -> MCPBaseClient:
+         """Get the appropriate MCP client for the session."""
+         # Throttled cleanup on access
+         now = datetime.now()
+         if now - self._last_cleanup_check > self._cleanup_check_interval:
+             await self._cleanup_inactive_sessions()
+             self._last_cleanup_check = now
+
+         # If the session_id equals the configured default_user_id use the base client
+         # instead of creating a per-session client
+         if self._shared_auth_provider:
+             default_uid = self._shared_auth_provider.config.default_user_id
+             if default_uid and session_id == default_uid:
+                 return self.mcp_client
+
+         # Fast path: check if session already exists (reader lock for concurrent access)
+         async with self._session_rwlock.reader:
+             if session_id in self._sessions:
+                 # Update last activity for existing client
+                 self._sessions[session_id].last_activity = datetime.now()
+                 return self._sessions[session_id].client
+
+         # Check session limit before creating new client (outside writer lock to avoid deadlock)
+         if self._client_config and len(self._sessions) >= self._client_config.max_sessions:
+             # Try cleanup first to free up space
+             await self._cleanup_inactive_sessions()
+
+         # Slow path: create session with writer lock for exclusive access
+         async with self._session_rwlock.writer:
+             # Double-check after acquiring writer lock (another coroutine might have created it)
+             if session_id in self._sessions:
+                 self._sessions[session_id].last_activity = datetime.now()
+                 return self._sessions[session_id].client
+
+             # Re-check session limit inside writer lock
+             if self._client_config and len(self._sessions) >= self._client_config.max_sessions:
+                 logger.warning("Session limit reached (%d), rejecting new session: %s",
+                                self._client_config.max_sessions,
+                                truncate_session_id(session_id))
+                 raise RuntimeError(f"Tool unavailable: Maximum concurrent sessions "
+                                    f"({self._client_config.max_sessions}) exceeded.")
+
+             # Create session client lazily
+             logger.info("Creating new MCP client for session: %s", truncate_session_id(session_id))
+             session_client, stop_event, lifetime_task = await self._create_session_client(session_id)
+             session_data = SessionData(
+                 client=session_client,
+                 last_activity=datetime.now(),
+                 ref_count=0,
+                 stop_event=stop_event,
+                 lifetime_task=lifetime_task,
+             )
+
+             # Cache the session data
+             self._sessions[session_id] = session_data
+             logger.info(" Total sessions: %d", len(self._sessions))
+             return session_client
+
+     @asynccontextmanager
+     async def _session_usage_context(self, session_id: str):
+         """Context manager to track active session usage and prevent cleanup."""
+         # Ensure session exists - create it if it doesn't
+         if session_id not in self._sessions:
+             # Create session client first
+             await self._get_session_client(session_id) # START read phase: bump ref_count under reader + session lock
+
+         async with self._session_rwlock.reader:
+             sdata = self._sessions.get(session_id)
+             if not sdata:
+                 # this can happen if the session is cleaned up between the check and the lock
+                 # this is rare and we can just return that the tool is temporarily unavailable
+                 yield None
+                 return
+             async with sdata.lock:
+                 sdata.ref_count += 1
+             client = sdata.client # capture
+         # END read phase (release reader before long await)
+
+         try:
+             yield client
+         finally:
+             # Brief read phase to decrement ref_count and touch activity
+             async with self._session_rwlock.reader:
+                 sdata = self._sessions.get(session_id)
+                 if sdata:
+                     async with sdata.lock:
+                         sdata.ref_count -= 1
+                     sdata.last_activity = datetime.now()
+
+     async def _create_session_client(self, session_id: str) -> tuple[MCPBaseClient, asyncio.Event, asyncio.Task]:
+         """Create a new MCP client instance for the session."""
+         from nat.plugins.mcp.client_base import MCPStreamableHTTPClient
+
+         config = self._client_config
+         if not config:
+             raise RuntimeError("Client config not initialized")
+
+         if config.server.transport == "streamable-http":
+             client = MCPStreamableHTTPClient(
+                 str(config.server.url),
+                 auth_provider=self._shared_auth_provider,
+                 user_id=session_id, # Pass session_id as user_id for cache isolation
+                 tool_call_timeout=config.tool_call_timeout,
+                 auth_flow_timeout=config.auth_flow_timeout,
+                 reconnect_enabled=config.reconnect_enabled,
+                 reconnect_max_attempts=config.reconnect_max_attempts,
+                 reconnect_initial_backoff=config.reconnect_initial_backoff,
+                 reconnect_max_backoff=config.reconnect_max_backoff)
+         else:
+             # per-user sessions are only supported for streamable-http transport
+             raise ValueError(f"Unsupported transport: {config.server.transport}")
+
+         ready = asyncio.Event()
+         stop_event = asyncio.Event()
+
+         async def _lifetime():
+             """
+             Create a lifetime task to respect task boundaries and ensure the
+             cancel scope is entered and exited in the same task.
+             """
+             try:
+                 async with client:
+                     ready.set()
+                     await stop_event.wait()
+             except Exception:
+                 ready.set() # Ensure we don't hang the waiter
+                 raise
+
+         task = asyncio.create_task(_lifetime(), name=f"mcp-session-{truncate_session_id(session_id)}")
+
+         # Wait for initialization with timeout to prevent infinite hangs
+         timeout = config.tool_call_timeout.total_seconds() if config else 300
+         try:
+             await asyncio.wait_for(ready.wait(), timeout=timeout)
+         except TimeoutError:
+             task.cancel()
+             try:
+                 await task
+             except asyncio.CancelledError:
+                 pass
+             logger.error("Session client initialization timed out after %ds for %s",
+                          timeout,
+                          truncate_session_id(session_id))
+             raise RuntimeError(f"Session client initialization timed out after {timeout}s")
+
+         # Check if initialization failed before ready was set
+         if task.done():
+             try:
+                 await task # Re-raise exception if the task failed
+             except Exception as e:
+                 logger.error("Failed to initialize session client for %s: %s", truncate_session_id(session_id), e)
+                 raise RuntimeError(f"Failed to initialize session client: {e}") from e
+
+         logger.info("Created session client for session: %s", truncate_session_id(session_id))
+         # NOTE: caller will place client into SessionData and attach stop_event/task
+         return client, stop_event, task
+
+
+ def mcp_session_tool_function(tool, function_group: MCPFunctionGroup):
+     """Create a session-aware NAT function for an MCP tool.
+
+     Routes each invocation to the appropriate per-session MCP client while
+     preserving the original tool input schema, converters, and description.
      """
-     server: MCPServerConfig = Field(..., description="Server connection details (transport, url/command, etc.)")
-     tool_call_timeout: timedelta = Field(
-         default=timedelta(seconds=60),
-         description="Timeout (in seconds) for the MCP tool call. Defaults to 60 seconds.")
-     auth_flow_timeout: timedelta = Field(
-         default=timedelta(seconds=300),
-         description="Timeout (in seconds) for the MCP auth flow. When the tool call requires interactive \
-             authentication, this timeout is used. Defaults to 300 seconds.")
-     reconnect_enabled: bool = Field(
-         default=True,
-         description="Whether to enable reconnecting to the MCP server if the connection is lost. \
-             Defaults to True.")
-     reconnect_max_attempts: int = Field(default=2,
-                                         ge=0,
-                                         description="Maximum number of reconnect attempts. Defaults to 2.")
-     reconnect_initial_backoff: float = Field(
-         default=0.5, ge=0.0, description="Initial backoff time for reconnect attempts. Defaults to 0.5 seconds.")
-     reconnect_max_backoff: float = Field(
-         default=50.0, ge=0.0, description="Maximum backoff time for reconnect attempts. Defaults to 50 seconds.")
-     tool_overrides: dict[str, MCPToolOverrideConfig] | None = Field(
-         default=None,
-         description="""Optional tool name overrides and description changes.
-         Example:
-             tool_overrides:
-                 calculator_add:
-                     alias: "add_numbers"
-                     description: "Add two numbers together"
-                 calculator_multiply:
-                     description: "Multiply two numbers" # alias defaults to original name
-         """)
-
-     @model_validator(mode="after")
-     def _validate_reconnect_backoff(self) -> "MCPClientConfig":
-         """Validate reconnect backoff values."""
-         if self.reconnect_max_backoff < self.reconnect_initial_backoff:
-             raise ValueError("reconnect_max_backoff must be greater than or equal to reconnect_initial_backoff")
-         return self
+     from nat.builder.function import FunctionInfo
+
+     def _convert_from_str(input_str: str) -> tool.input_schema:
+         return tool.input_schema.model_validate_json(input_str)
+
+     async def _response_fn(tool_input: BaseModel | None = None, **kwargs) -> str:
+         """Response function for the session-aware tool."""
+         try:
+             # Route to the appropriate session client
+             session_id = function_group._get_session_id_from_context()
+
+             # If no session is available and default-user fallback is disabled, deny the call
+             if function_group._shared_auth_provider and session_id is None:
+                 return "User not authorized to call the tool"
+
+             # Check if this is the default user - if so, use base client directly
+             if (not function_group._shared_auth_provider
+                     or session_id == function_group._shared_auth_provider.config.default_user_id):
+                 # Use base client directly for default user
+                 client = function_group.mcp_client
+                 session_tool = await client.get_tool(tool.name)
+             else:
+                 # Use session usage context to prevent cleanup during tool execution
+                 async with function_group._session_usage_context(session_id) as client:
+                     if client is None:
+                         return "Tool temporarily unavailable. Try again."
+                     session_tool = await client.get_tool(tool.name)
+
+             # Preserve original calling convention
+             if tool_input:
+                 args = tool_input.model_dump()
+                 return await session_tool.acall(args)
+
+             _ = session_tool.input_schema.model_validate(kwargs)
+             return await session_tool.acall(kwargs)
+         except Exception as e:
+             if tool_input:
+                 logger.warning("Error calling tool %s with serialized input: %s",
+                                tool.name,
+                                tool_input.model_dump(),
+                                exc_info=True)
+             else:
+                 logger.warning("Error calling tool %s with input: %s", tool.name, kwargs, exc_info=True)
+             return str(e)
+
+     return FunctionInfo.create(single_fn=_response_fn,
+                                description=tool.description,
+                                input_schema=tool.input_schema,
+                                converters=[_convert_from_str])


  @register_function_group(config_type=MCPClientConfig)
  async def mcp_client_function_group(config: MCPClientConfig, _builder: Builder):
      """
      Connect to an MCP server and expose tools as a function group.
+
      Args:
          config: The configuration for the MCP client
          _builder: The builder
@@ -215,8 +512,11 @@ async def mcp_client_function_group(config: MCPClientConfig, _builder: Builder):
                                           reconnect_initial_backoff=config.reconnect_initial_backoff,
                                           reconnect_max_backoff=config.reconnect_max_backoff)
      elif config.server.transport == "streamable-http":
+         # Use default_user_id for the base client
+         base_user_id = auth_provider.config.default_user_id if auth_provider else None
          client = MCPStreamableHTTPClient(str(config.server.url),
                                           auth_provider=auth_provider,
+                                          user_id=base_user_id,
                                           tool_call_timeout=config.tool_call_timeout,
                                           auth_flow_timeout=config.auth_flow_timeout,
                                           reconnect_enabled=config.reconnect_enabled,
@@ -231,6 +531,10 @@ async def mcp_client_function_group(config: MCPClientConfig, _builder: Builder):
      # Create the MCP function group
      group = MCPFunctionGroup(config=config)

+     # Store shared components for session client creation
+     group._shared_auth_provider = auth_provider
+     group._client_config = config
+
      async with client:
          # Expose the live MCP client on the function group instance so other components (e.g., HTTP endpoints)
          # can reuse the already-established session instead of creating a new client per request.
@@ -250,13 +554,13 @@ async def mcp_client_function_group(config: MCPClientConfig, _builder: Builder):
              function_name = override.alias if override and override.alias else tool_name
              description = override.description if override and override.description else tool.description

-             # Create the tool function
-             tool_fn = mcp_tool_function(tool)
+             # Create the tool function according to configuration
+             tool_fn = mcp_session_tool_function(tool, group)

              # Normalize optional typing for linter/type-checker compatibility
              single_fn = tool_fn.single_fn
              if single_fn is None:
-                 # Should not happen because mcp_tool_function always sets a single_fn
+                 # Should not happen because FunctionInfo always sets a single_fn
                  logger.warning("Skipping tool %s because single_fn is None", function_name)
                  continue

@@ -280,6 +584,7 @@ def mcp_apply_tool_alias_and_description(
          all_tools: dict, tool_overrides: dict[str, MCPToolOverrideConfig] | None) -> dict[str, MCPToolOverrideConfig]:
      """
      Filter tool overrides to only include tools that exist in the MCP server.
+
      Args:
          all_tools: The tools from the MCP server
          tool_overrides: The tool overrides to apply
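
The MCPFunctionGroup docstring above describes the locking discipline in prose: dictionary lookups and ref-count changes happen in short sections under the reader lock, structural changes to the session table happen under the writer lock, the RW lock is always taken before the per-session lock, and slow work (tool calls, client shutdown) runs with neither lock held. The following minimal sketch illustrates that discipline with aiorwlock; ToyEntry and ToyRegistry are hypothetical stand-ins for illustration only, not part of the package.

import asyncio
from dataclasses import dataclass, field

import aiorwlock


@dataclass
class ToyEntry:
    # Hypothetical stand-in for SessionData: a value plus a ref count guarded by its own lock.
    value: str
    ref_count: int = 0
    lock: asyncio.Lock = field(default_factory=asyncio.Lock)


class ToyRegistry:
    def __init__(self) -> None:
        self._entries: dict[str, ToyEntry] = {}
        self._rwlock = aiorwlock.RWLock()

    async def use(self, key: str) -> str | None:
        # Reader section: short, no network I/O; bump ref_count under the per-entry lock.
        async with self._rwlock.reader:
            entry = self._entries.get(key)
            if entry is None:
                return None  # caller treats this as "temporarily unavailable"
            async with entry.lock:
                entry.ref_count += 1
        try:
            return entry.value  # long-running work would happen here, outside the reader
        finally:
            async with self._rwlock.reader:
                async with entry.lock:
                    entry.ref_count -= 1

    async def create(self, key: str, value: str) -> None:
        # Writer section: structural change to the dict, exclusive access.
        async with self._rwlock.writer:
            self._entries.setdefault(key, ToyEntry(value))

    async def evict_idle(self) -> list[str]:
        # Writer pops only entries with ref_count == 0; the real cleanup closes
        # the stashed clients after the writer is released.
        async with self._rwlock.writer:
            idle = [k for k, e in self._entries.items() if e.ref_count == 0]
            for k in idle:
                self._entries.pop(k, None)
        return idle


async def _demo() -> None:
    reg = ToyRegistry()
    await reg.create("a", "client-a")
    print(await reg.use("a"), await reg.evict_idle())


if __name__ == "__main__":
    asyncio.run(_demo())

Ref counts taken under the reader are what keep eviction, a writer-side structural change, from pulling an entry out from under an in-flight call, which is the same invariant the docstring states for _sessions.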
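_create_session_client also relies on a "lifetime task": the client's async context manager is entered and exited inside one dedicated task, signalled ready through one event and shut down through a stop event rather than by cancelling the task, so cancel scopes never cross task boundaries. Below is a small self-contained sketch of the same pattern under that assumption, with a hypothetical dummy_client standing in for the real MCP client.

import asyncio
from contextlib import asynccontextmanager


@asynccontextmanager
async def dummy_client(name: str):
    # Hypothetical async resource; the real code uses MCPStreamableHTTPClient.
    print(f"{name}: connected")
    try:
        yield name
    finally:
        print(f"{name}: closed")


async def start_lifetime(name: str) -> tuple[asyncio.Event, asyncio.Task]:
    ready = asyncio.Event()
    stop = asyncio.Event()

    async def _lifetime() -> None:
        async with dummy_client(name):
            ready.set()        # signal the caller that __aenter__ finished
            await stop.wait()  # hold the context open until asked to stop

    task = asyncio.create_task(_lifetime())
    await asyncio.wait_for(ready.wait(), timeout=5)
    return stop, task


async def _demo() -> None:
    stop, task = await start_lifetime("session-abc")
    stop.set()   # request shutdown instead of cancelling the task
    await task   # __aexit__ runs in the task that entered the context


if __name__ == "__main__":
    asyncio.run(_demo())

Setting the stop event and then awaiting the task mirrors what _cleanup_inactive_sessions does after releasing the writer lock.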
nat/plugins/mcp/tool.py CHANGED
@@ -26,6 +26,7 @@ from nat.builder.function_info import FunctionInfo
  from nat.cli.register_workflow import register_function
  from nat.data_models.function import FunctionBaseConfig
  from nat.plugins.mcp.client_base import MCPToolClient
+ from nat.utils.decorators import deprecated

  logger = logging.getLogger(__name__)

@@ -109,6 +110,10 @@ def mcp_tool_function(tool: MCPToolClient) -> FunctionInfo:


  @register_function(config_type=MCPToolConfig)
+ @deprecated(
+     reason=
+     "This function is being replaced with the new mcp_client function group that supports additional MCP features",
+     feature_name="mcp_tool_wrapper")
  async def mcp_tool(config: MCPToolConfig, builder: Builder):
      """
      Generate a NeMo Agent Toolkit Function that wraps a tool provided by the MCP server.
nat/plugins/mcp/utils.py CHANGED
@@ -21,6 +21,22 @@ from pydantic import Field
  from pydantic import create_model


+ def truncate_session_id(session_id: str, max_length: int = 10) -> str:
+     """
+     Truncate a session ID for logging purposes.
+
+     Args:
+         session_id: The session ID to truncate
+         max_length: Maximum length before truncation (default: 10)
+
+     Returns:
+         Truncated session ID with "..." if longer than max_length, otherwise full ID
+     """
+     if len(session_id) > max_length:
+         return session_id[:max_length] + "..."
+     return session_id
+
+
  def model_from_mcp_schema(name: str, mcp_input_schema: dict) -> type[BaseModel]:
      """
      Create a pydantic model from the input schema of the MCP tool
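
As a quick usage note (assuming the published package is installed), truncate_session_id behaves exactly as the body above implies:

from nat.plugins.mcp.utils import truncate_session_id

print(truncate_session_id("0123456789abcdef"))  # -> "0123456789..."
print(truncate_session_id("short-id"))          # -> "short-id" (10 characters or fewer pass through unchanged)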