microsoft-agents-a365-tooling-extensions-openai 0.2.0.dev5__py3-none-any.whl → 0.2.1.dev0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- microsoft_agents_a365/tooling/extensions/openai/__init__.py +13 -2
- microsoft_agents_a365/tooling/extensions/openai/mcp_tool_registration_service.py +464 -18
- {microsoft_agents_a365_tooling_extensions_openai-0.2.0.dev5.dist-info → microsoft_agents_a365_tooling_extensions_openai-0.2.1.dev0.dist-info}/METADATA +1 -1
- microsoft_agents_a365_tooling_extensions_openai-0.2.1.dev0.dist-info/RECORD +6 -0
- {microsoft_agents_a365_tooling_extensions_openai-0.2.0.dev5.dist-info → microsoft_agents_a365_tooling_extensions_openai-0.2.1.dev0.dist-info}/WHEEL +1 -1
- {microsoft_agents_a365_tooling_extensions_openai-0.2.0.dev5.dist-info → microsoft_agents_a365_tooling_extensions_openai-0.2.1.dev0.dist-info}/top_level.txt +1 -0
- microsoft_agents_a365_tooling_extensions_openai-0.2.0.dev5.dist-info/RECORD +0 -6
microsoft_agents_a365/tooling/extensions/openai/__init__.py

@@ -2,10 +2,21 @@
 # Licensed under the MIT License.
 
 """
-OpenAI extensions for Microsoft Agent 365 Tooling SDK
+OpenAI extensions for Microsoft Agent 365 Tooling SDK.
 
 Tooling and utilities specifically for OpenAI framework integration.
-Provides OpenAI-specific helper utilities
+Provides OpenAI-specific helper utilities including:
+- McpToolRegistrationService: Service for MCP tool registration and chat history management
+
+For type hints, use the types directly from the OpenAI Agents SDK:
+- agents.memory.Session: Protocol for session objects
+- agents.items.TResponseInputItem: Type for input message items
 """
 
+from .mcp_tool_registration_service import McpToolRegistrationService
+
 __version__ = "1.0.0"
+
+__all__ = [
+    "McpToolRegistrationService",
+]
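The hunk above re-exports McpToolRegistrationService from the extension package root. A minimal usage sketch, assuming only the import path added here and the optional-logger constructor shown later in this diff; the logger name is illustrative:

```python
import logging

# Import path comes from the new __all__ export; the logger argument is the
# Optional[logging.Logger] parameter declared on __init__ later in this diff.
from microsoft_agents_a365.tooling.extensions.openai import McpToolRegistrationService

service = McpToolRegistrationService(logger=logging.getLogger("mcp-tooling"))
```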
microsoft_agents_a365/tooling/extensions/openai/mcp_tool_registration_service.py

@@ -1,28 +1,40 @@
-# Copyright (c) Microsoft
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
 
-
-
-import logging
+"""
+MCP Tool Registration Service for OpenAI.
 
-
+This module provides OpenAI-specific extensions for MCP tool registration,
+including methods to send chat history from OpenAI Sessions and message lists.
+"""
 
-
+import logging
+import uuid
+from dataclasses import dataclass
+from datetime import datetime, timezone
+from typing import Dict, List, Optional
 
+from agents import Agent
+from agents.items import TResponseInputItem
 from agents.mcp import (
     MCPServerStreamableHttp,
     MCPServerStreamableHttpParams,
 )
+from agents.memory import Session
+from microsoft_agents.hosting.core import Authorization, TurnContext
+
+from microsoft_agents_a365.runtime import OperationError, OperationResult
 from microsoft_agents_a365.runtime.utility import Utility
+from microsoft_agents_a365.tooling.models import ChatHistoryMessage, ToolOptions
 from microsoft_agents_a365.tooling.services.mcp_tool_server_configuration_service import (
     McpToolServerConfigurationService,
 )
-
+from microsoft_agents_a365.tooling.utils.constants import Constants
 from microsoft_agents_a365.tooling.utils.utility import (
     get_mcp_platform_authentication_scope,
 )
 
 
-# TODO: This is not needed. Remove this.
 @dataclass
 class MCPServerInfo:
     """Information about an MCP server"""
@@ -38,6 +50,8 @@ class MCPServerInfo:
 class McpToolRegistrationService:
     """Service for managing MCP tools and servers for an agent"""
 
+    _orchestrator_name: str = "OpenAI"
+
     def __init__(self, logger: Optional[logging.Logger] = None):
         """
         Initialize the MCP Tool Registration Service for OpenAI.
@@ -55,7 +69,7 @@ class McpToolRegistrationService:
         auth_handler_name: str,
         context: TurnContext,
         auth_token: Optional[str] = None,
-    ):
+    ) -> Agent:
         """
         Add new MCP servers to the agent by creating a new Agent instance.
 
@@ -74,7 +88,7 @@ class McpToolRegistrationService:
             New Agent instance with all MCP servers, or original agent if no new servers
         """
 
-        if
+        if auth_token is None or auth_token.strip() == "":
             scopes = get_mcp_platform_authentication_scope()
             authToken = await auth.exchange_token(context, scopes, auth_handler_name)
             auth_token = authToken.token
@@ -83,11 +97,13 @@ class McpToolRegistrationService:
         # mcp_server_configs = []
         # TODO: radevika: Update once the common project is merged.
 
+        options = ToolOptions(orchestrator_name=self._orchestrator_name)
         agentic_app_id = Utility.resolve_agent_identity(context, auth_token)
         self._logger.info(f"Listing MCP tool servers for agent {agentic_app_id}")
         mcp_server_configs = await self.config_service.list_tool_servers(
             agentic_app_id=agentic_app_id,
             auth_token=auth_token,
+            options=options,
         )
 
         self._logger.info(f"Loaded {len(mcp_server_configs)} MCP server configurations")
@@ -95,9 +111,13 @@ class McpToolRegistrationService:
         # Convert MCP server configs to MCPServerInfo objects
         mcp_servers_info = []
         for server_config in mcp_server_configs:
+            # Use mcp_server_name if available (not None or empty), otherwise fall back to mcp_server_unique_name
+            server_name = server_config.mcp_server_name or server_config.mcp_server_unique_name
+            # Use the URL from config (always populated by the configuration service)
+            server_url = server_config.url
             server_info = MCPServerInfo(
-                name=
-                url=
+                name=server_name,
+                url=server_url,
             )
             mcp_servers_info.append(server_info)
 
@@ -132,7 +152,13 @@ class McpToolRegistrationService:
             # Prepare headers with authorization
             headers = si.headers or {}
             if auth_token:
-                headers[
+                headers[Constants.Headers.AUTHORIZATION] = (
+                    f"{Constants.Headers.BEARER_PREFIX} {auth_token}"
+                )
+
+            headers[Constants.Headers.USER_AGENT] = Utility.get_user_agent_header(
+                self._orchestrator_name
+            )
 
             # Create MCPServerStreamableHttpParams with proper configuration
             params = MCPServerStreamableHttpParams(url=si.url, headers=headers)
@@ -168,8 +194,6 @@ class McpToolRegistrationService:
             all_mcp_servers = existing_mcp_servers + new_mcp_servers
 
             # Recreate the agent with all MCP servers
-            from agents import Agent
-
             new_agent = Agent(
                 name=agent.name,
                 model=agent.model,
@@ -201,12 +225,12 @@ class McpToolRegistrationService:
                 # Clean up connected servers if agent creation fails
                 self._logger.error(f"Failed to recreate agent with new MCP servers: {e}")
                 await self._cleanup_servers(connected_servers)
-                raise
+                raise
 
         self._logger.info("No new MCP servers to add to agent")
         return agent
 
-    async def _cleanup_servers(self, servers):
+    async def _cleanup_servers(self, servers: List[MCPServerStreamableHttp]) -> None:
         """Clean up connected MCP servers"""
         for server in servers:
             try:
@@ -216,8 +240,430 @@ class McpToolRegistrationService:
                 # Log cleanup errors but don't raise them
                 self._logger.debug(f"Error during server cleanup: {e}")
 
-    async def cleanup_all_servers(self):
+    async def cleanup_all_servers(self) -> None:
         """Clean up all connected MCP servers"""
         if hasattr(self, "_connected_servers"):
             await self._cleanup_servers(self._connected_servers)
             self._connected_servers = []
+
+    # --------------------------------------------------------------------------
+    # SEND CHAT HISTORY - OpenAI-specific implementations
+    # --------------------------------------------------------------------------
+
+    async def send_chat_history(
+        self,
+        turn_context: TurnContext,
+        session: Session,
+        limit: Optional[int] = None,
+        options: Optional[ToolOptions] = None,
+    ) -> OperationResult:
+        """
+        Extract chat history from an OpenAI Session and send it to the MCP platform.
+
+        This method extracts messages from an OpenAI Session object using get_items()
+        and sends them to the MCP platform for real-time threat protection.
+
+        Args:
+            turn_context: TurnContext from the Agents SDK containing conversation info.
+                Must have a valid activity with conversation.id, activity.id,
+                and activity.text.
+            session: OpenAI Session instance to extract messages from. Must support
+                the get_items() method which returns a list of TResponseInputItem.
+            limit: Optional maximum number of items to retrieve from session.
+                If None, retrieves all items.
+            options: Optional ToolOptions for customization. If not provided,
+                uses default options with orchestrator_name="OpenAI".
+
+        Returns:
+            OperationResult indicating success or failure. On success, returns
+            OperationResult.success(). On failure, returns OperationResult.failed()
+            with error details.
+
+        Raises:
+            ValueError: If turn_context is None or session is None.
+
+        Example:
+            >>> from agents import Agent, Runner
+            >>> from microsoft_agents_a365.tooling.extensions.openai import (
+            ...     McpToolRegistrationService
+            ... )
+            >>>
+            >>> service = McpToolRegistrationService()
+            >>> agent = Agent(name="my-agent", model="gpt-4")
+            >>>
+            >>> # In your agent handler:
+            >>> async with Runner.run(agent, messages) as result:
+            ...     session = result.session
+            ...     op_result = await service.send_chat_history(
+            ...         turn_context, session
+            ...     )
+            ...     if op_result.succeeded:
+            ...         print("Chat history sent successfully")
+        """
+        # Validate inputs
+        if turn_context is None:
+            raise ValueError("turn_context cannot be None")
+        if session is None:
+            raise ValueError("session cannot be None")
+
+        try:
+            # Extract messages from session
+            self._logger.info("Extracting messages from OpenAI session")
+            if limit is not None:
+                messages = session.get_items(limit=limit)
+            else:
+                messages = session.get_items()
+
+            self._logger.debug(f"Retrieved {len(messages)} items from session")
+
+            # Delegate to the list-based method
+            return await self.send_chat_history_messages(
+                turn_context=turn_context,
+                messages=messages,
+                options=options,
+            )
+        except ValueError:
+            # Re-raise validation errors
+            raise
+        except Exception as ex:
+            self._logger.error(f"Failed to send chat history from session: {ex}")
+            return OperationResult.failed(OperationError(ex))
+
+    async def send_chat_history_messages(
+        self,
+        turn_context: TurnContext,
+        messages: List[TResponseInputItem],
+        options: Optional[ToolOptions] = None,
+    ) -> OperationResult:
+        """
+        Send OpenAI chat history messages to the MCP platform for threat protection.
+
+        This method accepts a list of OpenAI TResponseInputItem messages, converts
+        them to ChatHistoryMessage format, and sends them to the MCP platform.
+
+        Args:
+            turn_context: TurnContext from the Agents SDK containing conversation info.
+                Must have a valid activity with conversation.id, activity.id,
+                and activity.text.
+            messages: List of OpenAI TResponseInputItem messages to send. Supports
+                UserMessage, AssistantMessage, SystemMessage, and other OpenAI
+                message types.
+            options: Optional ToolOptions for customization. If not provided,
+                uses default options with orchestrator_name="OpenAI".
+
+        Returns:
+            OperationResult indicating success or failure. On success, returns
+            OperationResult.success(). On failure, returns OperationResult.failed()
+            with error details.
+
+        Raises:
+            ValueError: If turn_context is None or messages is None.
+
+        Example:
+            >>> from microsoft_agents_a365.tooling.extensions.openai import (
+            ...     McpToolRegistrationService
+            ... )
+            >>>
+            >>> service = McpToolRegistrationService()
+            >>> messages = [
+            ...     {"role": "user", "content": "Hello"},
+            ...     {"role": "assistant", "content": "Hi there!"},
+            ... ]
+            >>>
+            >>> result = await service.send_chat_history_messages(
+            ...     turn_context, messages
+            ... )
+            >>> if result.succeeded:
+            ...     print("Chat history sent successfully")
+        """
+        # Validate inputs
+        if turn_context is None:
+            raise ValueError("turn_context cannot be None")
+        if messages is None:
+            raise ValueError("messages cannot be None")
+
+        # Handle empty list as no-op
+        if len(messages) == 0:
+            self._logger.info("Empty message list provided, returning success")
+            return OperationResult.success()
+
+        self._logger.info(f"Sending {len(messages)} OpenAI messages as chat history")
+
+        # Set default options
+        if options is None:
+            options = ToolOptions(orchestrator_name=self._orchestrator_name)
+        elif options.orchestrator_name is None:
+            options.orchestrator_name = self._orchestrator_name
+
+        try:
+            # Convert OpenAI messages to ChatHistoryMessage format
+            chat_history_messages = self._convert_openai_messages_to_chat_history(messages)
+
+            if len(chat_history_messages) == 0:
+                self._logger.warning("No messages could be converted to chat history format")
+                return OperationResult.success()
+
+            self._logger.debug(
+                f"Converted {len(chat_history_messages)} messages to ChatHistoryMessage format"
+            )
+
+            # Delegate to core service
+            return await self.config_service.send_chat_history(
+                turn_context=turn_context,
+                chat_history_messages=chat_history_messages,
+                options=options,
+            )
+        except ValueError:
+            # Re-raise validation errors from the core service
+            raise
+        except Exception as ex:
+            self._logger.error(f"Failed to send chat history messages: {ex}")
+            return OperationResult.failed(OperationError(ex))
+
+    # --------------------------------------------------------------------------
+    # PRIVATE HELPER METHODS - Message Conversion
+    # --------------------------------------------------------------------------
+
+    def _convert_openai_messages_to_chat_history(
+        self, messages: List[TResponseInputItem]
+    ) -> List[ChatHistoryMessage]:
+        """
+        Convert a list of OpenAI messages to ChatHistoryMessage format.
+
+        Args:
+            messages: List of OpenAI TResponseInputItem messages.
+
+        Returns:
+            List of ChatHistoryMessage objects. Messages that cannot be converted
+            are filtered out with a warning log.
+        """
+        chat_history_messages: List[ChatHistoryMessage] = []
+
+        for idx, message in enumerate(messages):
+            converted = self._convert_single_message(message, idx)
+            if converted is not None:
+                chat_history_messages.append(converted)
+
+        self._logger.info(
+            f"Converted {len(chat_history_messages)} of {len(messages)} messages "
+            "to ChatHistoryMessage format"
+        )
+        return chat_history_messages
+
+    def _convert_single_message(
+        self, message: TResponseInputItem, index: int = 0
+    ) -> Optional[ChatHistoryMessage]:
+        """
+        Convert a single OpenAI message to ChatHistoryMessage format.
+
+        Args:
+            message: Single OpenAI TResponseInputItem message.
+            index: Index of the message in the list (for logging).
+
+        Returns:
+            ChatHistoryMessage object or None if conversion fails.
+        """
+        try:
+            role = self._extract_role(message)
+            content = self._extract_content(message)
+            msg_id = self._extract_id(message)
+            timestamp = self._extract_timestamp(message)
+
+            self._logger.debug(
+                f"Converting message {index}: role={role}, "
+                f"has_id={msg_id is not None}, has_timestamp={timestamp is not None}"
+            )
+
+            # Skip messages with empty content after extraction
+            # The ChatHistoryMessage validator requires non-empty content
+            if not content or not content.strip():
+                self._logger.warning(f"Message {index} has empty content, skipping")
+                return None
+
+            return ChatHistoryMessage(
+                id=msg_id,
+                role=role,
+                content=content,
+                timestamp=timestamp,
+            )
+        except Exception as ex:
+            self._logger.error(f"Failed to convert message {index}: {ex}")
+            return None
+
+    def _extract_role(self, message: TResponseInputItem) -> str:
+        """
+        Extract the role from an OpenAI message.
+
+        Role mapping:
+        - UserMessage or role="user" -> "user"
+        - AssistantMessage or role="assistant" -> "assistant"
+        - SystemMessage or role="system" -> "system"
+        - ResponseOutputMessage with role="assistant" -> "assistant"
+        - Unknown types -> "user" (default fallback with warning)
+
+        Args:
+            message: OpenAI message object.
+
+        Returns:
+            Role string: "user", "assistant", or "system".
+        """
+        # Check for role attribute directly
+        if hasattr(message, "role"):
+            role = message.role
+            if role in ("user", "assistant", "system"):
+                return role
+
+        # Check message type by class name
+        type_name = type(message).__name__
+
+        if "UserMessage" in type_name or "user" in type_name.lower():
+            return "user"
+        elif "AssistantMessage" in type_name or "assistant" in type_name.lower():
+            return "assistant"
+        elif "SystemMessage" in type_name or "system" in type_name.lower():
+            return "system"
+        elif "ResponseOutputMessage" in type_name:
+            # ResponseOutputMessage typically has role attribute
+            if hasattr(message, "role") and message.role == "assistant":
+                return "assistant"
+            return "assistant"  # Default for response output
+
+        # For dict-like objects
+        if isinstance(message, dict):
+            role = message.get("role", "")
+            if role in ("user", "assistant", "system"):
+                return role
+
+        # Default fallback with warning
+        self._logger.warning(f"Unknown message type {type_name}, defaulting to 'user' role")
+        return "user"
+
+    def _extract_content(self, message: TResponseInputItem) -> str:
+        """
+        Extract text content from an OpenAI message.
+
+        Content extraction priority:
+        1. If message has .content as string -> use directly
+        2. If message has .content as list -> concatenate all text parts
+        3. If message has .text attribute -> use directly
+        4. If content is empty/None -> return empty string with warning
+
+        Args:
+            message: OpenAI message object.
+
+        Returns:
+            Extracted text content as string.
+        """
+        content = ""
+
+        # Try .content attribute first
+        if hasattr(message, "content"):
+            raw_content = message.content
+
+            if isinstance(raw_content, str):
+                content = raw_content
+            elif isinstance(raw_content, list):
+                # Concatenate text parts from content list
+                text_parts = []
+                for part in raw_content:
+                    if isinstance(part, str):
+                        text_parts.append(part)
+                    elif hasattr(part, "text"):
+                        text_parts.append(str(part.text))
+                    elif isinstance(part, dict):
+                        if "text" in part:
+                            text_parts.append(str(part["text"]))
+                        elif part.get("type") == "text" and "text" in part:
+                            text_parts.append(str(part["text"]))
+                content = " ".join(text_parts)
+
+        # Try .text attribute as fallback
+        if not content and hasattr(message, "text"):
+            content = str(message.text) if message.text else ""
+
+        # Try dict-like access
+        if not content and isinstance(message, dict):
+            content = message.get("content", "") or message.get("text", "") or ""
+            if isinstance(content, list):
+                text_parts = []
+                for part in content:
+                    if isinstance(part, str):
+                        text_parts.append(part)
+                    elif isinstance(part, dict) and "text" in part:
+                        text_parts.append(str(part["text"]))
+                content = " ".join(text_parts)
+
+        if not content:
+            self._logger.warning("Message has empty content, using empty string")
+
+        return content
+
+    def _extract_id(self, message: TResponseInputItem) -> str:
+        """
+        Extract or generate a unique ID for the message.
+
+        If the message has an existing ID, it is preserved. Otherwise,
+        a new UUID is generated.
+
+        Args:
+            message: OpenAI message object.
+
+        Returns:
+            Message ID as string.
+        """
+        # Try to get existing ID
+        existing_id = None
+
+        if hasattr(message, "id") and message.id:
+            existing_id = str(message.id)
+        elif isinstance(message, dict) and message.get("id"):
+            existing_id = str(message["id"])
+
+        if existing_id:
+            return existing_id
+
+        # Generate new UUID
+        generated_id = str(uuid.uuid4())
+        self._logger.debug(f"Generated UUID {generated_id} for message without ID")
+        return generated_id
+
+    def _extract_timestamp(self, message: TResponseInputItem) -> datetime:
+        """
+        Extract or generate a timestamp for the message.
+
+        If the message has an existing timestamp, it is preserved. Otherwise,
+        the current UTC time is used.
+
+        Args:
+            message: OpenAI message object.
+
+        Returns:
+            Timestamp as datetime object.
+        """
+        # Try to get existing timestamp
+        existing_timestamp = None
+
+        if hasattr(message, "timestamp") and message.timestamp:
+            existing_timestamp = message.timestamp
+        elif hasattr(message, "created_at") and message.created_at:
+            existing_timestamp = message.created_at
+        elif isinstance(message, dict):
+            existing_timestamp = message.get("timestamp") or message.get("created_at")
+
+        if existing_timestamp:
+            # Convert to datetime if needed
+            if isinstance(existing_timestamp, datetime):
+                return existing_timestamp
+            elif isinstance(existing_timestamp, (int, float)):
+                # Unix timestamp
+                return datetime.fromtimestamp(existing_timestamp, tz=timezone.utc)
+            elif isinstance(existing_timestamp, str):
+                # Try ISO format parsing
+                try:
+                    return datetime.fromisoformat(existing_timestamp.replace("Z", "+00:00"))
+                except ValueError:
+                    pass
+
+        # Use current UTC time
+        self._logger.debug("Using current UTC time for message without timestamp")
+        return datetime.now(timezone.utc)
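The new send_chat_history_messages method accepts plain role/content dictionaries, as its docstring example shows. A hedged sketch of wiring it into a handler, assuming turn_context is supplied by the Microsoft Agents hosting SDK and that OperationResult exposes the succeeded flag used in the docstring:

```python
from microsoft_agents_a365.tooling.extensions.openai import McpToolRegistrationService

async def forward_history(turn_context) -> None:
    # turn_context is assumed to be the hosting SDK's TurnContext for the current activity.
    service = McpToolRegistrationService()
    messages = [
        {"role": "user", "content": "Hello"},
        {"role": "assistant", "content": "Hi there!"},
    ]
    result = await service.send_chat_history_messages(turn_context, messages)
    if not result.succeeded:
        # Failures come back as OperationResult.failed(OperationError(...)), not as exceptions.
        print("Sending chat history to the MCP platform failed")
```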
microsoft_agents_a365_tooling_extensions_openai-0.2.1.dev0.dist-info/RECORD

@@ -0,0 +1,6 @@
+microsoft_agents_a365/tooling/extensions/openai/__init__.py,sha256=3x9ncXL9mr7VXW32go9Nu4biFAaJ1BK82vaHCFSSvvM,681
+microsoft_agents_a365/tooling/extensions/openai/mcp_tool_registration_service.py,sha256=Gz4IyOUSvZYUibkOnf9Pi_oIUvYG0rM85lI6rmATMu4,26837
+microsoft_agents_a365_tooling_extensions_openai-0.2.1.dev0.dist-info/METADATA,sha256=XogED2Efd5-Yy-8Y_Aju0jPCsgh324RpGyupQQwp8mE,3288
+microsoft_agents_a365_tooling_extensions_openai-0.2.1.dev0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+microsoft_agents_a365_tooling_extensions_openai-0.2.1.dev0.dist-info/top_level.txt,sha256=m90AvzRnjbL6fpi20mzOj6HUVkR2LWuf2JuXm4LL9LU,27
+microsoft_agents_a365_tooling_extensions_openai-0.2.1.dev0.dist-info/RECORD,,
microsoft_agents_a365_tooling_extensions_openai-0.2.0.dev5.dist-info/RECORD

@@ -1,6 +0,0 @@
-microsoft_agents_a365/tooling/extensions/openai/__init__.py,sha256=16FpUt9TQMKA7lRcRin9si0EJBZNAEGxV5WHbncTX_Y,272
-microsoft_agents_a365/tooling/extensions/openai/mcp_tool_registration_service.py,sha256=NMybbP3VvOXX26SlAPifbJgIIy3te5xnkbm7FHz0Pp8,8948
-microsoft_agents_a365_tooling_extensions_openai-0.2.0.dev5.dist-info/METADATA,sha256=tshLHuGhD2THUYV6oLPsUe-gRyN1e9UMzRSbTMfKES0,3288
-microsoft_agents_a365_tooling_extensions_openai-0.2.0.dev5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-microsoft_agents_a365_tooling_extensions_openai-0.2.0.dev5.dist-info/top_level.txt,sha256=G3c2_4sy5_EM_BWO67SbK2tKj4G8XFn-QXRbh8g9Lgk,22
-microsoft_agents_a365_tooling_extensions_openai-0.2.0.dev5.dist-info/RECORD,,