datarobot-genai 0.2.39__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. datarobot_genai/core/agents/__init__.py +1 -1
  2. datarobot_genai/core/agents/base.py +5 -2
  3. datarobot_genai/core/chat/responses.py +6 -1
  4. datarobot_genai/core/utils/auth.py +188 -31
  5. datarobot_genai/crewai/__init__.py +1 -4
  6. datarobot_genai/crewai/agent.py +150 -17
  7. datarobot_genai/crewai/events.py +11 -4
  8. datarobot_genai/drmcp/__init__.py +4 -2
  9. datarobot_genai/drmcp/core/config.py +21 -1
  10. datarobot_genai/drmcp/core/mcp_instance.py +5 -49
  11. datarobot_genai/drmcp/core/routes.py +108 -13
  12. datarobot_genai/drmcp/core/tool_config.py +16 -0
  13. datarobot_genai/drmcp/core/utils.py +110 -0
  14. datarobot_genai/drmcp/test_utils/tool_base_ete.py +41 -26
  15. datarobot_genai/drmcp/tools/clients/gdrive.py +2 -0
  16. datarobot_genai/drmcp/tools/clients/microsoft_graph.py +96 -0
  17. datarobot_genai/drmcp/tools/clients/perplexity.py +173 -0
  18. datarobot_genai/drmcp/tools/clients/tavily.py +199 -0
  19. datarobot_genai/drmcp/tools/confluence/tools.py +0 -5
  20. datarobot_genai/drmcp/tools/gdrive/tools.py +12 -59
  21. datarobot_genai/drmcp/tools/jira/tools.py +4 -8
  22. datarobot_genai/drmcp/tools/microsoft_graph/tools.py +135 -19
  23. datarobot_genai/drmcp/tools/perplexity/__init__.py +0 -0
  24. datarobot_genai/drmcp/tools/perplexity/tools.py +117 -0
  25. datarobot_genai/drmcp/tools/predictive/data.py +1 -9
  26. datarobot_genai/drmcp/tools/predictive/deployment.py +0 -8
  27. datarobot_genai/drmcp/tools/predictive/deployment_info.py +0 -19
  28. datarobot_genai/drmcp/tools/predictive/model.py +0 -21
  29. datarobot_genai/drmcp/tools/predictive/predict_realtime.py +3 -0
  30. datarobot_genai/drmcp/tools/predictive/project.py +3 -19
  31. datarobot_genai/drmcp/tools/predictive/training.py +1 -19
  32. datarobot_genai/drmcp/tools/tavily/__init__.py +13 -0
  33. datarobot_genai/drmcp/tools/tavily/tools.py +141 -0
  34. datarobot_genai/langgraph/agent.py +10 -2
  35. datarobot_genai/llama_index/__init__.py +1 -1
  36. datarobot_genai/llama_index/agent.py +284 -5
  37. datarobot_genai/nat/agent.py +17 -6
  38. {datarobot_genai-0.2.39.dist-info → datarobot_genai-0.3.1.dist-info}/METADATA +3 -1
  39. {datarobot_genai-0.2.39.dist-info → datarobot_genai-0.3.1.dist-info}/RECORD +43 -40
  40. datarobot_genai/crewai/base.py +0 -159
  41. datarobot_genai/drmcp/core/tool_filter.py +0 -117
  42. datarobot_genai/llama_index/base.py +0 -299
  43. {datarobot_genai-0.2.39.dist-info → datarobot_genai-0.3.1.dist-info}/WHEEL +0 -0
  44. {datarobot_genai-0.2.39.dist-info → datarobot_genai-0.3.1.dist-info}/entry_points.txt +0 -0
  45. {datarobot_genai-0.2.39.dist-info → datarobot_genai-0.3.1.dist-info}/licenses/AUTHORS +0 -0
  46. {datarobot_genai-0.2.39.dist-info → datarobot_genai-0.3.1.dist-info}/licenses/LICENSE +0 -0
datarobot_genai/core/agents/__init__.py
@@ -17,7 +17,7 @@ This package provides:
 - BaseAgent: common initialization for agent env/config fields
 - Common helpers: make_system_prompt, extract_user_prompt_content
 - Framework utilities (optional extras):
-  - crewai: build_llm, create_pipeline_interactions_from_messages
+  - crewai: create_pipeline_interactions_from_messages
 - langgraph: create_pipeline_interactions_from_events
 - llamaindex: DataRobotLiteLLM, create_pipeline_interactions_from_events
 """
datarobot_genai/core/agents/base.py
@@ -12,6 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import annotations
+
 import abc
 import json
 import os
@@ -19,6 +21,7 @@ from collections.abc import AsyncGenerator
 from collections.abc import Mapping
 from typing import Any
 from typing import Generic
+from typing import TypeAlias
 from typing import TypedDict
 from typing import TypeVar
 from typing import cast
@@ -98,7 +101,7 @@ class BaseAgent(Generic[TTool], abc.ABC):
         return get_api_base(self.api_base, deployment_id)
 
     @abc.abstractmethod
-    async def invoke(self, completion_create_params: CompletionCreateParams) -> "InvokeReturn":
+    async def invoke(self, completion_create_params: CompletionCreateParams) -> InvokeReturn:
        raise NotImplementedError("Not implemented")
 
     @classmethod
@@ -167,7 +170,7 @@ class UsageMetrics(TypedDict):
 
 
 # Canonical return type for DRUM-compatible invoke implementations
-InvokeReturn = (
+InvokeReturn: TypeAlias = (
     AsyncGenerator[tuple[str | Event, MultiTurnSample | None, UsageMetrics], None]
     | tuple[str, MultiTurnSample | None, UsageMetrics]
 )
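With the alias in place, a DRUM-compatible non-streaming invoke simply returns the tuple form. A minimal sketch, assuming a hypothetical EchoAgent subclass (BaseAgent may require additional configuration not shown in this hunk):

    from openai.types.chat import CompletionCreateParams

    from datarobot_genai.core.agents.base import BaseAgent
    from datarobot_genai.core.agents.base import InvokeReturn
    from datarobot_genai.core.agents.base import default_usage_metrics
    from datarobot_genai.core.agents.base import extract_user_prompt_content


    class EchoAgent(BaseAgent):  # hypothetical subclass for illustration only
        async def invoke(self, completion_create_params: CompletionCreateParams) -> InvokeReturn:
            # Non-streaming form of InvokeReturn: (response_text, pipeline_interactions, usage_metrics)
            prompt = extract_user_prompt_content(completion_create_params)
            return f"echo: {prompt}", None, default_usage_metrics()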
datarobot_genai/core/chat/responses.py
@@ -14,6 +14,8 @@
 
 """OpenAI-compatible response helpers for chat interactions."""
 
+from __future__ import annotations
+
 import asyncio
 import queue
 import time
@@ -24,6 +26,7 @@ from collections.abc import AsyncGenerator
 from collections.abc import AsyncIterator
 from collections.abc import Iterator
 from concurrent.futures import ThreadPoolExecutor
+from typing import TYPE_CHECKING
 from typing import Any
 from typing import TypeVar
 
@@ -38,10 +41,12 @@ from openai.types.chat import ChatCompletionMessage
 from openai.types.chat.chat_completion import Choice
 from openai.types.chat.chat_completion_chunk import Choice as ChunkChoice
 from openai.types.chat.chat_completion_chunk import ChoiceDelta
-from ragas import MultiTurnSample
 
 from datarobot_genai.core.agents import default_usage_metrics
 
+if TYPE_CHECKING:
+    from ragas import MultiTurnSample
+
 
 class CustomModelChatResponse(ChatCompletion):
     pipeline_interactions: str | None = None
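The ragas import is now deferred: it stays visible to type checkers but is no longer loaded at module import time. The same two-part pattern (annotation-only import plus a lazy runtime import where an instance is actually built) recurs throughout this release; a small sketch with a hypothetical helper:

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from ragas import MultiTurnSample  # annotation-only import; skipped at runtime


    def build_sample(messages: list) -> MultiTurnSample | None:  # hypothetical helper
        if not messages:
            return None
        # Lazy import, paid only when a sample is actually constructed
        from ragas import MultiTurnSample

        return MultiTurnSample(user_input=messages)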
datarobot_genai/core/utils/auth.py
@@ -12,13 +12,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
+import os
 import warnings
+from collections.abc import Sequence
 from typing import Any
+from typing import Protocol
 
+import aiohttp
 import jwt
-from datarobot.auth.datarobot.oauth import AsyncOAuth as DatarobotAsyncOAuthClient
+from datarobot.auth.datarobot.oauth import AsyncOAuth as DatarobotOAuthClient
+from datarobot.auth.exceptions import OAuthProviderNotFound
+from datarobot.auth.exceptions import OAuthValidationErr
 from datarobot.auth.identity import Identity
-from datarobot.auth.oauth import AsyncOAuthComponent
+from datarobot.auth.oauth import OAuthToken
 from datarobot.auth.session import AuthCtx
 from datarobot.core.config import DataRobotAppFrameworkBaseSettings
 from datarobot.models.genai.agent.auth import ToolAuth
@@ -183,52 +189,203 @@ class AuthContextHeaderHandler:
        return None
 
 
+# --- OAuth Token Provider Implementation ---
+
+
+class TokenRetriever(Protocol):
+    """Protocol for OAuth token retrievers."""
+
+    def filter_identities(self, identities: Sequence[Identity]) -> list[Identity]:
+        """Filter identities to only those valid for this retriever implementation."""
+        ...
+
+    async def refresh_access_token(self, identity: Identity) -> OAuthToken:
+        """Refresh the access token for the given identity ID.
+
+        Parameters
+        ----------
+        identity_id : str
+            The provider identity ID to refresh the token for.
+
+        Returns
+        -------
+        OAuthToken
+            The refreshed OAuth token.
+        """
+        ...
+
+
+class DatarobotTokenRetriever:
+    """Retrieves OAuth tokens using the DataRobot platform."""
+
+    def __init__(self) -> None:
+        self._client = DatarobotOAuthClient()
+
+    def filter_identities(self, identities: Sequence[Identity]) -> list[Identity]:
+        """Filter oauth2 identities to only those with provider_identity_id.
+
+        The `provider_identity_id` is required in order to identify the provider
+        and retrieve the access token from DataRobot OAuth Providers service.
+        """
+        return [i for i in identities if i.type == "oauth2" and i.provider_identity_id]
+
+    async def refresh_access_token(self, identity: Identity) -> OAuthToken:
+        """Refresh the access token using DataRobot's OAuth client."""
+        return await self._client.refresh_access_token(identity_id=identity.provider_identity_id)
+
+
+class AuthlibTokenRetriever:
+    """Retrieves OAuth tokens from a generic Authlib-based endpoint."""
+
+    def __init__(self, application_endpoint: str) -> None:
+        if not application_endpoint:
+            raise ValueError("AuthlibTokenRetriever requires 'application_endpoint'.")
+        self.application_endpoint = application_endpoint.rstrip("/")
+
+    def filter_identities(self, identities: Sequence[Identity]) -> list[Identity]:
+        """Filter identities to only OAuth2 identities."""
+        return [i for i in identities if i.type == "oauth2" and i.provider_identity_id is None]
+
+    async def refresh_access_token(self, identity: Identity) -> OAuthToken:
+        """Retrieve an OAuth token via an HTTP POST request.
+
+        Parameters
+        ----------
+        identity : Identity
+            The identity to retrieve the token for.
+
+        Returns
+        -------
+        OAuthToken
+            The retrieved OAuth token.
+        """
+        api_token = os.environ.get("DATAROBOT_API_TOKEN")
+        if not api_token:
+            raise ValueError("DATAROBOT_API_TOKEN environment variable is required but not set.")
+
+        token_url = f"{self.application_endpoint}/oauth/token/"
+        headers = {"Authorization": f"Bearer {api_token}"}
+        payload = {"identity_id": identity.id}
+        timeout = aiohttp.ClientTimeout(total=30)
+
+        try:
+            async with aiohttp.ClientSession(timeout=timeout) as session:
+                async with session.post(token_url, headers=headers, json=payload) as response:
+                    response.raise_for_status()
+                    data = await response.json()
+                    logger.debug(f"Retrieved access token from {token_url}")
+                    return OAuthToken(**data)
+        except aiohttp.ClientError as e:
+            logger.error(f"Error retrieving token from {token_url}: {e}")
+            raise
+
+
+class OAuthConfig(BaseModel):
+    """Configuration extracted from AuthCtx metadata for OAuth operations."""
+
+    implementation: str = "datarobot"
+    application_endpoint: str | None = None
+
+    @classmethod
+    def from_auth_ctx(cls, auth_ctx: AuthCtx) -> "OAuthConfig":
+        metadata = auth_ctx.metadata or {}
+        return cls(
+            implementation=metadata.get("oauth_implementation", "datarobot"),
+            application_endpoint=metadata.get("application_endpoint"),
+        )
+
+
+def create_token_retriever(config: OAuthConfig) -> TokenRetriever:
+    """Create a token retriever based on the OAuth configuration.
+
+    Parameters
+    ----------
+    config : OAuthConfig
+        The OAuth configuration specifying implementation type and endpoints.
+
+    Returns
+    -------
+    TokenRetriever
+        The configured token retriever instance.
+    """
+    if config.implementation == "datarobot":
+        return DatarobotTokenRetriever()
+
+    if config.implementation == "authlib":
+        if not config.application_endpoint:
+            raise ValueError("Required 'application_endpoint' not found in metadata.")
+        return AuthlibTokenRetriever(config.application_endpoint)
+
+    raise ValueError(
+        f"Unsupported OAuth implementation: '{config.implementation}'. "
+        f"Supported values: datarobot, authlib."
+    )
+
+
 class AsyncOAuthTokenProvider:
-    """Manages OAuth access tokens using generic OAuth client."""
+    """Provides OAuth tokens for authorized users.
+
+    This class manages OAuth token retrieval for users with multiple identity providers.
+    It uses either DataRobot or Authlib as the OAuth token storage and refresh backend
+    based on the auth context metadata.
+    """
 
     def __init__(self, auth_ctx: AuthCtx) -> None:
+        """Initialize the provider with an authorization context.
+
+        Parameters
+        ----------
+        auth_ctx : AuthCtx
+            The authorization context containing user identities and metadata.
+        """
        self.auth_ctx = auth_ctx
-        self.oauth_client = self._create_oauth_client()
+        config = OAuthConfig.from_auth_ctx(auth_ctx)
+        self._retriever = create_token_retriever(config)
 
     def _get_identity(self, provider_type: str | None) -> Identity:
-        """Retrieve the appropriate identity from the authentication context."""
-        identities = [x for x in self.auth_ctx.identities if x.provider_identity_id is not None]
-
-        if not identities:
-            raise ValueError("No identities found in authorization context.")
+        """Get identity from auth context, filtered by provider_type if specified."""
+        oauth_identities = self._retriever.filter_identities(self.auth_ctx.identities)
+        if not oauth_identities:
+            raise OAuthProviderNotFound("No OAuth provider found.")
 
        if provider_type is None:
-            if len(identities) > 1:
-                raise ValueError(
-                    "Multiple identities found. Please specify 'provider_type' parameter."
+            if len(oauth_identities) > 1:
+                raise OAuthValidationErr(
+                    "Multiple OAuth providers found. Specify 'provider_type' parameter."
                )
-            return identities[0]
-
-        identity = next((id for id in identities if id.provider_type == provider_type), None)
+            return oauth_identities[0]
 
+        identity = next((i for i in oauth_identities if i.provider_type == provider_type), None)
        if identity is None:
-            raise ValueError(f"No identity found for provider '{provider_type}'.")
-
+            raise OAuthValidationErr(f"No identity found for provider '{provider_type}'.")
        return identity
 
     async def get_token(self, auth_type: ToolAuth, provider_type: str | None = None) -> str:
-        """Get OAuth access token using the specified method."""
+        """Get an OAuth access token for the specified auth type and provider.
+
+        Parameters
+        ----------
+        auth_type : ToolAuth
+            Authentication type (only OBO is supported).
+        provider_type : str, optional
+            The specific provider to use (e.g., 'google'). Required if multiple
+            identities are available.
+
+        Returns
+        -------
+        str
+            The retrieved OAuth access token.
+
+        Raises
+        ------
+        ValueError
+            If the auth type is unsupported or if a suitable identity cannot be found.
+        """
        if auth_type != ToolAuth.OBO:
            raise ValueError(
-                f"Unsupported auth type: {auth_type}. Only {ToolAuth.OBO} is supported."
+                f"Unsupported auth type: {auth_type}. Only OBO (on-behalf-of) is supported."
            )
 
        identity = self._get_identity(provider_type)
-        token_data = await self.oauth_client.refresh_access_token(
-            identity_id=identity.provider_identity_id
-        )
+        token_data = await self._retriever.refresh_access_token(identity)
        return token_data.access_token
-
-    def _create_oauth_client(self) -> AsyncOAuthComponent:
-        """Create either DataRobot or Authlib OAuth client based on
-        authorization context.
-
-        Note: at the moment, only DataRobot OAuth client is supported.
-        """
-        logger.debug("Using DataRobot OAuth client")
-        return DatarobotAsyncOAuthClient()
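In practice the provider is constructed from the request's auth context and the backend is selected by metadata. A hedged usage sketch (the AuthCtx value would normally come from AuthContextHeaderHandler, and the 'google' provider_type is illustrative):

    from datarobot.auth.session import AuthCtx
    from datarobot.models.genai.agent.auth import ToolAuth

    from datarobot_genai.core.utils.auth import AsyncOAuthTokenProvider


    async def fetch_access_token(auth_ctx: AuthCtx) -> str:
        # With no metadata, OAuthConfig defaults to the "datarobot" implementation;
        # metadata like {"oauth_implementation": "authlib", "application_endpoint": ...}
        # switches to the Authlib-backed retriever.
        provider = AsyncOAuthTokenProvider(auth_ctx)
        # Only ToolAuth.OBO is supported; provider_type disambiguates when the user
        # has more than one OAuth identity.
        return await provider.get_token(ToolAuth.OBO, provider_type="google")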
datarobot_genai/crewai/__init__.py
@@ -2,22 +2,19 @@
 
 Public API:
 - mcp_tools_context: Context manager returning available MCP tools for CrewAI.
-- build_llm: Construct a CrewAI LLM configured for DataRobot endpoints.
 - create_pipeline_interactions_from_messages: Convert messages to MultiTurnSample.
 """
 
 from datarobot_genai.core.mcp.common import MCPConfig
 
-from .agent import build_llm
+from .agent import CrewAIAgent
 from .agent import create_pipeline_interactions_from_messages
-from .base import CrewAIAgent
 from .events import CrewAIEventListener
 from .mcp import mcp_tools_context
 
 __all__ = [
     "mcp_tools_context",
     "CrewAIAgent",
-    "build_llm",
     "create_pipeline_interactions_from_messages",
     "CrewAIEventListener",
     "MCPConfig",
datarobot_genai/crewai/agent.py
@@ -12,26 +12,42 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from crewai import LLM
-from ragas import MultiTurnSample
-from ragas.messages import AIMessage
-from ragas.messages import HumanMessage
-from ragas.messages import ToolMessage
+"""
+Base class for CrewAI-based agents.
 
-from datarobot_genai.core.utils.urls import get_api_base
+Manages MCP tool lifecycle and standardizes kickoff flow.
 
+Note: This base does not capture pipeline interactions; it returns None by
+default. Subclasses may implement message capture if they need interactions.
+"""
 
-def build_llm(
-    *,
-    api_base: str,
-    api_key: str | None,
-    model: str,
-    deployment_id: str | None,
-    timeout: int,
-) -> LLM:
-    """Create a CrewAI LLM configured for DataRobot LLM Gateway or deployment."""
-    base = get_api_base(api_base, deployment_id)
-    return LLM(model=model, api_base=base, api_key=api_key, timeout=timeout)
+from __future__ import annotations
+
+import abc
+import asyncio
+from collections.abc import AsyncGenerator
+from typing import TYPE_CHECKING
+from typing import Any
+
+from crewai import Crew
+from crewai.events.event_bus import CrewAIEventsBus
+from crewai.tools import BaseTool
+from openai.types.chat import CompletionCreateParams
+
+from datarobot_genai.core.agents.base import BaseAgent
+from datarobot_genai.core.agents.base import InvokeReturn
+from datarobot_genai.core.agents.base import UsageMetrics
+from datarobot_genai.core.agents.base import default_usage_metrics
+from datarobot_genai.core.agents.base import extract_user_prompt_content
+from datarobot_genai.core.agents.base import is_streaming
+
+from .mcp import mcp_tools_context
+
+if TYPE_CHECKING:
+    from ragas import MultiTurnSample
+    from ragas.messages import AIMessage
+    from ragas.messages import HumanMessage
+    from ragas.messages import ToolMessage
 
 
 def create_pipeline_interactions_from_messages(
@@ -39,4 +55,121 @@ def create_pipeline_interactions_from_messages(
 ) -> MultiTurnSample | None:
     if not messages:
        return None
+    # Lazy import to reduce memory overhead when ragas is not used
+    from ragas import MultiTurnSample
+
     return MultiTurnSample(user_input=messages)
+
+
+class CrewAIAgent(BaseAgent[BaseTool], abc.ABC):
+    """Abstract base agent for CrewAI workflows.
+
+    Subclasses should define the ``agents`` and ``tasks`` properties
+    and may override ``build_crewai_workflow`` to customize the workflow
+    construction.
+    """
+
+    @property
+    @abc.abstractmethod
+    def agents(self) -> list[Any]:  # CrewAI Agent list
+        raise NotImplementedError
+
+    @property
+    @abc.abstractmethod
+    def tasks(self) -> list[Any]:  # CrewAI Task list
+        raise NotImplementedError
+
+    def build_crewai_workflow(self) -> Any:
+        """Create a CrewAI workflow instance.
+
+        Default implementation constructs a Crew with provided agents and tasks.
+        Subclasses can override to customize Crew options.
+        """
+        return Crew(agents=self.agents, tasks=self.tasks, verbose=self.verbose)
+
+    @abc.abstractmethod
+    def make_kickoff_inputs(self, user_prompt_content: str) -> dict[str, Any]:
+        """Build the inputs dict for ``Crew.kickoff``.
+
+        Subclasses must implement this to provide the exact inputs required
+        by their CrewAI tasks.
+        """
+        raise NotImplementedError
+
+    def _extract_pipeline_interactions(self) -> MultiTurnSample | None:
+        """Extract pipeline interactions from event listener if available."""
+        if not hasattr(self, "event_listener"):
+            return None
+        try:
+            listener = getattr(self, "event_listener", None)
+            messages = getattr(listener, "messages", None) if listener is not None else None
+            return create_pipeline_interactions_from_messages(messages)
+        except Exception:
+            return None
+
+    def _extract_usage_metrics(self, crew_output: Any) -> UsageMetrics:
+        """Extract usage metrics from crew output."""
+        token_usage = getattr(crew_output, "token_usage", None)
+        if token_usage is not None:
+            return {
+                "completion_tokens": int(getattr(token_usage, "completion_tokens", 0)),
+                "prompt_tokens": int(getattr(token_usage, "prompt_tokens", 0)),
+                "total_tokens": int(getattr(token_usage, "total_tokens", 0)),
+            }
+        return default_usage_metrics()
+
+    def _process_crew_output(
+        self, crew_output: Any
+    ) -> tuple[str, MultiTurnSample | None, UsageMetrics]:
+        """Process crew output into response tuple."""
+        response_text = str(crew_output.raw)
+        pipeline_interactions = self._extract_pipeline_interactions()
+        usage_metrics = self._extract_usage_metrics(crew_output)
+        return response_text, pipeline_interactions, usage_metrics
+
+    async def invoke(self, completion_create_params: CompletionCreateParams) -> InvokeReturn:
+        """Run the CrewAI workflow with the provided completion parameters."""
+        user_prompt_content = extract_user_prompt_content(completion_create_params)
+        # Preserve prior template startup print for CLI parity
+        try:
+            print("Running agent with user prompt:", user_prompt_content, flush=True)
+        except Exception:
+            # Printing is best-effort; proceed regardless
+            pass
+
+        # Use MCP context manager to handle connection lifecycle
+        with mcp_tools_context(
+            authorization_context=self._authorization_context,
+            forwarded_headers=self.forwarded_headers,
+        ) as mcp_tools:
+            # Set MCP tools for all agents if MCP is not configured this is effectively a no-op
+            self.set_mcp_tools(mcp_tools)
+
+            # If an event listener is provided by the subclass/template, register it
+            if hasattr(self, "event_listener") and CrewAIEventsBus is not None:
+                try:
+                    listener = getattr(self, "event_listener")
+                    setup_fn = getattr(listener, "setup_listeners", None)
+                    if callable(setup_fn):
+                        setup_fn(CrewAIEventsBus)
+                except Exception:
+                    # Listener is optional best-effort; proceed without failing invoke
+                    pass
+
+            crew = self.build_crewai_workflow()
+
+            if is_streaming(completion_create_params):
+
+                async def _gen() -> AsyncGenerator[
+                    tuple[str, MultiTurnSample | None, UsageMetrics]
+                ]:
+                    crew_output = await asyncio.to_thread(
+                        crew.kickoff,
+                        inputs=self.make_kickoff_inputs(user_prompt_content),
+                    )
+                    yield self._process_crew_output(crew_output)
+
+                return _gen()
+
+            crew_output = crew.kickoff(inputs=self.make_kickoff_inputs(user_prompt_content))
+            return self._process_crew_output(crew_output)
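A minimal subclass sketch against the new base (the Researcher agent, task text, and the {topic} input key are hypothetical; set_mcp_tools, verbose, and the constructor signature are assumed to come from the shared BaseAgent plumbing and are not shown in this diff):

    from typing import Any

    from crewai import Agent
    from crewai import Task

    from datarobot_genai.crewai import CrewAIAgent


    class ResearchAgent(CrewAIAgent):
        # Hypothetical single-agent crew for illustration.
        def __init__(self, *args: Any, **kwargs: Any) -> None:
            super().__init__(*args, **kwargs)
            self._researcher = Agent(
                role="Researcher",
                goal="Answer the user's question concisely",
                backstory="A careful analyst.",
            )

        @property
        def agents(self) -> list[Any]:
            return [self._researcher]

        @property
        def tasks(self) -> list[Any]:
            return [
                Task(
                    description="Answer the question: {topic}",
                    expected_output="A short answer",
                    agent=self._researcher,
                )
            ]

        def make_kickoff_inputs(self, user_prompt_content: str) -> dict[str, Any]:
            # Keys must match the placeholders referenced in the task descriptions.
            return {"topic": user_prompt_content}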
datarobot_genai/crewai/events.py
@@ -16,12 +16,13 @@ from __future__ import annotations
 import importlib
 import json
 import logging
+from typing import TYPE_CHECKING
 from typing import Any
 
-from ragas.messages import AIMessage
-from ragas.messages import HumanMessage
-from ragas.messages import ToolCall
-from ragas.messages import ToolMessage
+if TYPE_CHECKING:
+    from ragas.messages import AIMessage
+    from ragas.messages import HumanMessage
+    from ragas.messages import ToolMessage
 
 # Resolve crewai symbols at runtime to avoid mypy issues with untyped packages
 try:
@@ -65,6 +66,12 @@ class CrewAIEventListener:
         self.messages: list[HumanMessage | AIMessage | ToolMessage] = []
 
     def setup_listeners(self, crewai_event_bus: Any) -> None:
+        # Lazy import to reduce memory overhead when ragas is not used
+        from ragas.messages import AIMessage
+        from ragas.messages import HumanMessage
+        from ragas.messages import ToolCall
+        from ragas.messages import ToolMessage
+
         @crewai_event_bus.on(CrewKickoffStartedEvent)
         def on_crew_execution_started(_: Any, event: Any) -> None:
             self.messages.append(
datarobot_genai/drmcp/__init__.py
@@ -19,7 +19,8 @@ A reusable library for building Model Context Protocol (MCP) servers with DataRo
 """
 
 # Export main server components
-from datarobot_genai.drmcp.test_utils.clients.openai import OpenAILLMMCPClient
+from datarobot_genai.drmcp.core.utils import filter_tools_by_tags
+from datarobot_genai.drmcp.test_utils.clients.dr_gateway import DRLLMGatewayMCPClient
 from datarobot_genai.drmcp.test_utils.mcp_utils_ete import ete_test_mcp_session
 from datarobot_genai.drmcp.test_utils.mcp_utils_ete import get_dr_mcp_server_url
 from datarobot_genai.drmcp.test_utils.mcp_utils_ete import get_headers
@@ -66,11 +67,12 @@ __all__ = [
     "extract_datarobot_runtime_param_payload",
     "extract_datarobot_dict_runtime_param_payload",
     "extract_datarobot_credential_runtime_param_payload",
+    "filter_tools_by_tags",
     # Test utilities
     "get_dr_mcp_server_url",
     "get_headers",
     "ete_test_mcp_session",
-    "OpenAILLMMCPClient",
+    "DRLLMGatewayMCPClient",
     "ETETestExpectations",
     "ToolBaseE2E",
     "ToolCallTestExpectations",
datarobot_genai/drmcp/core/config.py
@@ -76,7 +76,25 @@ class MCPToolConfig(BaseSettings):
            RUNTIME_PARAM_ENV_VAR_NAME_PREFIX + "ENABLE_MICROSOFT_GRAPH_TOOLS",
            "ENABLE_MICROSOFT_GRAPH_TOOLS",
        ),
-        description="Enable/disable Sharepoint tools",
+        description="Enable/disable Microsoft Graph (Sharepoint/OneDrive) tools",
+    )
+
+    enable_perplexity_tools: bool = Field(
+        default=False,
+        validation_alias=AliasChoices(
+            RUNTIME_PARAM_ENV_VAR_NAME_PREFIX + "ENABLE_PERPLEXITY_TOOLS",
+            "ENABLE_PERPLEXITY_TOOLS",
+        ),
+        description="Enable/disable Perplexity tools",
+    )
+
+    enable_tavily_tools: bool = Field(
+        default=False,
+        validation_alias=AliasChoices(
+            RUNTIME_PARAM_ENV_VAR_NAME_PREFIX + "ENABLE_TAVILY_TOOLS",
+            "ENABLE_TAVILY_TOOLS",
+        ),
+        description="Enable/disable Tavily search tools",
     )
 
     is_atlassian_oauth_provider_configured: bool = Field(
@@ -131,6 +149,8 @@ class MCPToolConfig(BaseSettings):
        "enable_confluence_tools",
        "enable_gdrive_tools",
        "enable_microsoft_graph_tools",
+        "enable_perplexity_tools",
+        "enable_tavily_tools",
        "is_atlassian_oauth_provider_configured",
        "is_google_oauth_provider_configured",
        "is_microsoft_oauth_provider_configured",