microsoft-agents-hosting-core 0.7.0__py3-none-any.whl → 0.7.0.dev0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25)
  1. microsoft_agents/hosting/core/__init__.py +1 -24
  2. microsoft_agents/hosting/core/app/agent_application.py +24 -12
  3. microsoft_agents/hosting/core/app/oauth/_sign_in_state.py +15 -7
  4. microsoft_agents/hosting/core/authorization/claims_identity.py +0 -14
  5. microsoft_agents/hosting/core/channel_service_adapter.py +14 -28
  6. microsoft_agents/hosting/core/connector/client/connector_client.py +1 -0
  7. microsoft_agents/hosting/core/connector/client/user_token_client.py +1 -0
  8. microsoft_agents/hosting/core/rest_channel_service_client_factory.py +2 -2
  9. microsoft_agents/hosting/core/storage/memory_storage.py +1 -0
  10. microsoft_agents/hosting/core/storage/storage.py +1 -0
  11. microsoft_agents/hosting/core/storage/transcript_logger.py +1 -0
  12. {microsoft_agents_hosting_core-0.7.0.dist-info → microsoft_agents_hosting_core-0.7.0.dev0.dist-info}/METADATA +3 -21
  13. {microsoft_agents_hosting_core-0.7.0.dist-info → microsoft_agents_hosting_core-0.7.0.dev0.dist-info}/RECORD +16 -25
  14. microsoft_agents/hosting/core/app/streaming/__init__.py +0 -14
  15. microsoft_agents/hosting/core/app/streaming/citation.py +0 -22
  16. microsoft_agents/hosting/core/app/streaming/citation_util.py +0 -85
  17. microsoft_agents/hosting/core/app/streaming/streaming_response.py +0 -411
  18. microsoft_agents/hosting/core/http/__init__.py +0 -17
  19. microsoft_agents/hosting/core/http/_channel_service_routes.py +0 -202
  20. microsoft_agents/hosting/core/http/_http_adapter_base.py +0 -136
  21. microsoft_agents/hosting/core/http/_http_request_protocol.py +0 -36
  22. microsoft_agents/hosting/core/http/_http_response.py +0 -56
  23. {microsoft_agents_hosting_core-0.7.0.dist-info → microsoft_agents_hosting_core-0.7.0.dev0.dist-info}/WHEEL +0 -0
  24. {microsoft_agents_hosting_core-0.7.0.dist-info → microsoft_agents_hosting_core-0.7.0.dev0.dist-info}/licenses/LICENSE +0 -0
  25. {microsoft_agents_hosting_core-0.7.0.dist-info → microsoft_agents_hosting_core-0.7.0.dev0.dist-info}/top_level.txt +0 -0

microsoft_agents/hosting/core/app/streaming/streaming_response.py
@@ -1,411 +0,0 @@
- # Copyright (c) Microsoft Corporation. All rights reserved.
- # Licensed under the MIT License.
-
- import asyncio
- import logging
- from typing import List, Optional, Callable, Literal, TYPE_CHECKING
-
- from microsoft_agents.activity import (
-     Activity,
-     Entity,
-     Attachment,
-     Channels,
-     ClientCitation,
-     DeliveryModes,
-     SensitivityUsageInfo,
- )
-
- if TYPE_CHECKING:
-     from microsoft_agents.hosting.core.turn_context import TurnContext
-
- from .citation import Citation
- from .citation_util import CitationUtil
-
- logger = logging.getLogger(__name__)
-
-
- class StreamingResponse:
-     """
-     A helper class for streaming responses to the client.
-
-     This class is used to send a series of updates to the client in a single response.
-     The expected sequence of calls is:
-
-     `queue_informative_update()`, `queue_text_chunk()`, `queue_text_chunk()`, ..., `end_stream()`.
-
-     Once `end_stream()` is called, the stream is considered ended and no further updates can be sent.
-     """
-
-     def __init__(self, context: "TurnContext"):
-         """
-         Creates a new StreamingResponse instance.
-
-         Args:
-             context: Context for the current turn of conversation with the user.
-         """
-         self._context = context
-         self._sequence_number = 1
-         self._stream_id: Optional[str] = None
-         self._message = ""
-         self._attachments: Optional[List[Attachment]] = None
-         self._ended = False
-         self._cancelled = False
-
-         # Queue for outgoing activities
-         self._queue: List[Callable[[], Activity]] = []
-         self._queue_sync: Optional[asyncio.Task] = None
-         self._chunk_queued = False
-
-         # Powered by AI feature flags
-         self._enable_feedback_loop = False
-         self._feedback_loop_type: Optional[Literal["default", "custom"]] = None
-         self._enable_generated_by_ai_label = False
-         self._citations: Optional[List[ClientCitation]] = []
-         self._sensitivity_label: Optional[SensitivityUsageInfo] = None
-
-         # Channel information
-         self._is_streaming_channel: bool = False
-         self._channel_id: Channels = None
-         self._interval: float = 0.1  # Default interval for sending updates
-         self._set_defaults(context)
-
-     @property
-     def stream_id(self) -> Optional[str]:
-         """
-         Gets the stream ID of the current response.
-         Assigned after the initial update is sent.
-         """
-         return self._stream_id
-
-     @property
-     def citations(self) -> Optional[List[ClientCitation]]:
-         """Gets the citations of the current response."""
-         return self._citations
-
-     @property
-     def updates_sent(self) -> int:
-         """Gets the number of updates sent for the stream."""
-         return self._sequence_number - 1
-
-     def queue_informative_update(self, text: str) -> None:
-         """
-         Queues an informative update to be sent to the client.
-
-         Args:
-             text: Text of the update to send.
-         """
-         if not self._is_streaming_channel:
-             return
-
-         if self._ended:
-             raise RuntimeError("The stream has already ended.")
-
-         # Queue a typing activity
-         def create_activity():
-             activity = Activity(
-                 type="typing",
-                 text=text,
-                 entities=[
-                     Entity(
-                         type="streaminfo",
-                         stream_type="informative",
-                         stream_sequence=self._sequence_number,
-                     )
-                 ],
-             )
-             self._sequence_number += 1
-             return activity
-
-         self._queue_activity(create_activity)
-
-     def queue_text_chunk(
-         self, text: str, citations: Optional[List[Citation]] = None
-     ) -> None:
-         """
-         Queues a chunk of partial message text to be sent to the client.
-
-         The text will be sent as quickly as possible to the client.
-         Chunks may be combined before delivery to the client.
-
-         Args:
-             text: Partial text of the message to send.
-             citations: Citations to be included in the message.
-         """
-         if self._cancelled:
-             return
-         if self._ended:
-             raise RuntimeError("The stream has already ended.")
-
-         # Update full message text
-         self._message += text
-
-         # If there are citations, modify the content so that the sources are numbers instead of [doc1], [doc2], etc.
-         self._message = CitationUtil.format_citations_response(self._message)
-
-         # Queue the next chunk
-         self._queue_next_chunk()
-
-     async def end_stream(self) -> None:
-         """
-         Ends the stream by sending the final message to the client.
-         """
-         if self._ended:
-             raise RuntimeError("The stream has already ended.")
-
-         # Queue final message
-         self._ended = True
-         self._queue_next_chunk()
-
-         # Wait for the queue to drain
-         await self.wait_for_queue()
-
-     def set_attachments(self, attachments: List[Attachment]) -> None:
-         """
-         Sets the attachments to attach to the final chunk.
-
-         Args:
-             attachments: List of attachments.
-         """
-         self._attachments = attachments
-
-     def set_sensitivity_label(self, sensitivity_label: SensitivityUsageInfo) -> None:
-         """
-         Sets the sensitivity label to attach to the final chunk.
-
-         Args:
-             sensitivity_label: The sensitivity label.
-         """
-         self._sensitivity_label = sensitivity_label
-
-     def set_citations(self, citations: List[Citation]) -> None:
-         """
-         Sets the citations for the full message.
-
-         Args:
-             citations: Citations to be included in the message.
-         """
-         if citations:
-             if not self._citations:
-                 self._citations = []
-
-             curr_pos = len(self._citations)
-
-             for citation in citations:
-                 client_citation = ClientCitation(
-                     type="Claim",
-                     position=curr_pos + 1,
-                     appearance={
-                         "type": "DigitalDocument",
-                         "name": citation.title or f"Document #{curr_pos + 1}",
-                         "abstract": CitationUtil.snippet(citation.content, 477),
-                     },
-                 )
-                 curr_pos += 1
-                 self._citations.append(client_citation)
-
-     def set_feedback_loop(self, enable_feedback_loop: bool) -> None:
-         """
-         Sets the Feedback Loop in Teams that allows a user to
-         give thumbs up or down to a response.
-         Default is False.
-
-         Args:
-             enable_feedback_loop: If true, the feedback loop is enabled.
-         """
-         self._enable_feedback_loop = enable_feedback_loop
-
-     def set_feedback_loop_type(
-         self, feedback_loop_type: Literal["default", "custom"]
-     ) -> None:
-         """
-         Sets the type of UI to use for the feedback loop.
-
-         Args:
-             feedback_loop_type: The type of the feedback loop.
-         """
-         self._feedback_loop_type = feedback_loop_type
-
-     def set_generated_by_ai_label(self, enable_generated_by_ai_label: bool) -> None:
-         """
-         Sets the Generated by AI label in Teams.
-         Default is False.
-
-         Args:
-             enable_generated_by_ai_label: If true, the label is added.
-         """
-         self._enable_generated_by_ai_label = enable_generated_by_ai_label
-
-     def get_message(self) -> str:
-         """
-         Returns the most recently streamed message.
-         """
-         return self._message
-
-     async def wait_for_queue(self) -> None:
-         """
-         Waits for the outgoing activity queue to be empty.
-         """
-         if self._queue_sync:
-             await self._queue_sync
-
-     def _set_defaults(self, context: "TurnContext"):
-         if Channels.ms_teams == context.activity.channel_id.channel:
-             self._is_streaming_channel = True
-             self._interval = 1.0
-         elif Channels.direct_line == context.activity.channel_id.channel:
-             self._is_streaming_channel = True
-             self._interval = 0.5
-         elif context.activity.delivery_mode == DeliveryModes.stream:
-             self._is_streaming_channel = True
-             self._interval = 0.1
-
-         self._channel_id = context.activity.channel_id
-
-     def _queue_next_chunk(self) -> None:
-         """
-         Queues the next chunk of text to be sent to the client.
-         """
-         # Are we already waiting to send a chunk?
-         if self._chunk_queued:
-             return
-
-         # Queue a chunk of text to be sent
-         self._chunk_queued = True
-
-         def create_activity():
-             self._chunk_queued = False
-             if self._ended:
-                 # Send final message
-                 activity = Activity(
-                     type="message",
-                     text=self._message or "end stream response",
-                     attachments=self._attachments or [],
-                     entities=[
-                         Entity(
-                             type="streaminfo",
-                             stream_id=self._stream_id,
-                             stream_type="final",
-                             stream_sequence=self._sequence_number,
-                         )
-                     ],
-                 )
-             elif self._is_streaming_channel:
-                 # Send typing activity
-                 activity = Activity(
-                     type="typing",
-                     text=self._message,
-                     entities=[
-                         Entity(
-                             type="streaminfo",
-                             stream_type="streaming",
-                             stream_sequence=self._sequence_number,
-                         )
-                     ],
-                 )
-             else:
-                 return
-             self._sequence_number += 1
-             return activity
-
-         self._queue_activity(create_activity)
-
-     def _queue_activity(self, factory: Callable[[], Activity]) -> None:
-         """
-         Queues an activity to be sent to the client.
-         """
-         self._queue.append(factory)
-
-         # If there's no sync in progress, start one
-         if not self._queue_sync:
-             self._queue_sync = asyncio.create_task(self._drain_queue())
-
-     async def _drain_queue(self) -> None:
-         """
-         Sends any queued activities to the client until the queue is empty.
-         """
-         try:
-             logger.debug(f"Draining queue with {len(self._queue)} activities.")
-             while self._queue:
-                 factory = self._queue.pop(0)
-                 activity = factory()
-                 if activity:
-                     await self._send_activity(activity)
-         except Exception as err:
-             if (
-                 "403" in str(err)
-                 and self._context.activity.channel_id == Channels.ms_teams
-             ):
-                 logger.warning("Teams channel stopped the stream.")
-                 self._cancelled = True
-             else:
-                 logger.error(
-                     f"Error occurred when sending activity while streaming: {err}"
-                 )
-                 raise
-         finally:
-             self._queue_sync = None
-
-     async def _send_activity(self, activity: Activity) -> None:
-         """
-         Sends an activity to the client and saves the stream ID returned.
-
-         Args:
-             activity: The activity to send.
-         """
-
-         streaminfo_entity = None
-
-         if not activity.entities:
-             streaminfo_entity = Entity(type="streaminfo")
-             activity.entities = [streaminfo_entity]
-         else:
-             for entity in activity.entities:
-                 if hasattr(entity, "type") and entity.type == "streaminfo":
-                     streaminfo_entity = entity
-                     break
-
-             if not streaminfo_entity:
-                 # If no streaminfo entity exists, create one
-                 streaminfo_entity = Entity(type="streaminfo")
-                 activity.entities.append(streaminfo_entity)
-
-         # Set activity ID to the assigned stream ID
-         if self._stream_id:
-             activity.id = self._stream_id
-             streaminfo_entity.stream_id = self._stream_id
-
-         if self._citations and len(self._citations) > 0 and not self._ended:
-             # Filter out the citations unused in content.
-             curr_citations = CitationUtil.get_used_citations(
-                 self._message, self._citations
-             )
-             if curr_citations:
-                 activity.entities.append(
-                     Entity(
-                         type="https://schema.org/Message",
-                         schema_type="Message",
-                         context="https://schema.org",
-                         id="",
-                         citation=curr_citations,
-                     )
-                 )
-
-         # Add in Powered by AI feature flags
-         if self._ended:
-             if self._enable_feedback_loop and self._feedback_loop_type:
-                 # Add feedback loop to streaminfo entity
-                 streaminfo_entity.feedback_loop = {"type": self._feedback_loop_type}
-             else:
-                 # Add feedback loop enabled to streaminfo entity
-                 streaminfo_entity.feedback_loop_enabled = self._enable_feedback_loop
-             # Add in Generated by AI
-             if self._enable_generated_by_ai_label:
-                 activity.add_ai_metadata(self._citations, self._sensitivity_label)
-
-         # Send activity
-         response = await self._context.send_activity(activity)
-         await asyncio.sleep(self._interval)
-
-         # Save assigned stream ID
-         if not self._stream_id and response:
-             self._stream_id = response.id
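
For reference, a minimal usage sketch of the removed StreamingResponse class, following the call sequence documented in its docstring (queue_informative_update, queue_text_chunk, ..., end_stream). The stream_reply function and the context argument are illustrative assumptions; only the import path and the StreamingResponse methods come from the 0.7.0 sources shown above.

# Hedged sketch; assumes the 0.7.0 package, where this module still exists.
from microsoft_agents.hosting.core.app.streaming.streaming_response import (
    StreamingResponse,
)


async def stream_reply(context) -> None:
    # `context` is assumed to be the TurnContext for the current turn.
    stream = StreamingResponse(context)

    # Optional informative update (only sent on streaming-capable channels).
    stream.queue_informative_update("Looking that up...")

    # One or more partial text chunks; chunks may be combined before delivery.
    stream.queue_text_chunk("The first part of the answer, ")
    stream.queue_text_chunk("followed by the rest of it.")

    # Queue the final "message" activity and wait for the outgoing queue to drain.
    await stream.end_stream()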

microsoft_agents/hosting/core/http/__init__.py
@@ -1,17 +0,0 @@
- # Copyright (c) Microsoft Corporation. All rights reserved.
- # Licensed under the MIT License.
-
- """HTTP abstractions for framework-agnostic adapter implementations."""
-
- from ._http_request_protocol import HttpRequestProtocol
- from ._http_response import HttpResponse, HttpResponseFactory
- from ._http_adapter_base import HttpAdapterBase
- from ._channel_service_routes import ChannelServiceRoutes
-
- __all__ = [
-     "HttpRequestProtocol",
-     "HttpResponse",
-     "HttpResponseFactory",
-     "HttpAdapterBase",
-     "ChannelServiceRoutes",
- ]

microsoft_agents/hosting/core/http/_channel_service_routes.py
@@ -1,202 +0,0 @@
- # Copyright (c) Microsoft Corporation. All rights reserved.
- # Licensed under the MIT License.
-
- """Channel service route definitions (framework-agnostic logic)."""
-
- from typing import Type, List, Union
-
- from microsoft_agents.activity import (
-     AgentsModel,
-     Activity,
-     AttachmentData,
-     ConversationParameters,
-     Transcript,
- )
- from microsoft_agents.hosting.core import ChannelApiHandlerProtocol
-
- from ._http_request_protocol import HttpRequestProtocol
-
-
- class ChannelServiceRoutes:
-     """Defines the Channel Service API routes and their handlers.
-
-     This class provides framework-agnostic route logic that can be
-     adapted to different web frameworks (aiohttp, FastAPI, etc.).
-     """
-
-     def __init__(self, handler: ChannelApiHandlerProtocol, base_url: str = ""):
-         """Initialize channel service routes.
-
-         Args:
-             handler: The handler that implements the Channel API protocol.
-             base_url: Optional base URL prefix for all routes.
-         """
-         self.handler = handler
-         self.base_url = base_url
-
-     @staticmethod
-     async def deserialize_from_body(
-         request: HttpRequestProtocol, target_model: Type[AgentsModel]
-     ) -> AgentsModel:
-         """Deserialize request body to target model."""
-         content_type = request.headers.get("Content-Type", "")
-         if "application/json" not in content_type:
-             raise ValueError("Content-Type must be application/json")
-
-         body = await request.json()
-         return target_model.model_validate(body)
-
-     @staticmethod
-     def serialize_model(model_or_list: Union[AgentsModel, List[AgentsModel]]) -> dict:
-         """Serialize model or list of models to JSON-compatible dict."""
-         if isinstance(model_or_list, AgentsModel):
-             return model_or_list.model_dump(
-                 mode="json", exclude_unset=True, by_alias=True
-             )
-         else:
-             return [
-                 model.model_dump(mode="json", exclude_unset=True, by_alias=True)
-                 for model in model_or_list
-             ]
-
-     # Route handler methods
-     async def send_to_conversation(self, request: HttpRequestProtocol) -> dict:
-         """Handle POST /v3/conversations/{conversation_id}/activities."""
-         activity = await self.deserialize_from_body(request, Activity)
-         conversation_id = request.get_path_param("conversation_id")
-         result = await self.handler.on_send_to_conversation(
-             request.get_claims_identity(),
-             conversation_id,
-             activity,
-         )
-         return self.serialize_model(result)
-
-     async def reply_to_activity(self, request: HttpRequestProtocol) -> dict:
-         """Handle POST /v3/conversations/{conversation_id}/activities/{activity_id}."""
-         activity = await self.deserialize_from_body(request, Activity)
-         conversation_id = request.get_path_param("conversation_id")
-         activity_id = request.get_path_param("activity_id")
-         result = await self.handler.on_reply_to_activity(
-             request.get_claims_identity(),
-             conversation_id,
-             activity_id,
-             activity,
-         )
-         return self.serialize_model(result)
-
-     async def update_activity(self, request: HttpRequestProtocol) -> dict:
-         """Handle PUT /v3/conversations/{conversation_id}/activities/{activity_id}."""
-         activity = await self.deserialize_from_body(request, Activity)
-         conversation_id = request.get_path_param("conversation_id")
-         activity_id = request.get_path_param("activity_id")
-         result = await self.handler.on_update_activity(
-             request.get_claims_identity(),
-             conversation_id,
-             activity_id,
-             activity,
-         )
-         return self.serialize_model(result)
-
-     async def delete_activity(self, request: HttpRequestProtocol) -> None:
-         """Handle DELETE /v3/conversations/{conversation_id}/activities/{activity_id}."""
-         conversation_id = request.get_path_param("conversation_id")
-         activity_id = request.get_path_param("activity_id")
-         await self.handler.on_delete_activity(
-             request.get_claims_identity(),
-             conversation_id,
-             activity_id,
-         )
-
-     async def get_activity_members(self, request: HttpRequestProtocol) -> dict:
-         """Handle GET /v3/conversations/{conversation_id}/activities/{activity_id}/members."""
-         conversation_id = request.get_path_param("conversation_id")
-         activity_id = request.get_path_param("activity_id")
-         result = await self.handler.on_get_activity_members(
-             request.get_claims_identity(),
-             conversation_id,
-             activity_id,
-         )
-         return self.serialize_model(result)
-
-     async def create_conversation(self, request: HttpRequestProtocol) -> dict:
-         """Handle POST /."""
-         conversation_parameters = await self.deserialize_from_body(
-             request, ConversationParameters
-         )
-         result = await self.handler.on_create_conversation(
-             request.get_claims_identity(), conversation_parameters
-         )
-         return self.serialize_model(result)
-
-     async def get_conversations(self, request: HttpRequestProtocol) -> dict:
-         """Handle GET /."""
-         # TODO: continuation token? conversation_id?
-         result = await self.handler.on_get_conversations(
-             request.get_claims_identity(), None
-         )
-         return self.serialize_model(result)
-
-     async def get_conversation_members(self, request: HttpRequestProtocol) -> dict:
-         """Handle GET /v3/conversations/{conversation_id}/members."""
-         conversation_id = request.get_path_param("conversation_id")
-         result = await self.handler.on_get_conversation_members(
-             request.get_claims_identity(),
-             conversation_id,
-         )
-         return self.serialize_model(result)
-
-     async def get_conversation_member(self, request: HttpRequestProtocol) -> dict:
-         """Handle GET /v3/conversations/{conversation_id}/members/{member_id}."""
-         conversation_id = request.get_path_param("conversation_id")
-         member_id = request.get_path_param("member_id")
-         result = await self.handler.on_get_conversation_member(
-             request.get_claims_identity(),
-             member_id,
-             conversation_id,
-         )
-         return self.serialize_model(result)
-
-     async def get_conversation_paged_members(
-         self, request: HttpRequestProtocol
-     ) -> dict:
-         """Handle GET /v3/conversations/{conversation_id}/pagedmembers."""
-         conversation_id = request.get_path_param("conversation_id")
-         # TODO: continuation token? page size?
-         result = await self.handler.on_get_conversation_paged_members(
-             request.get_claims_identity(),
-             conversation_id,
-         )
-         return self.serialize_model(result)
-
-     async def delete_conversation_member(self, request: HttpRequestProtocol) -> dict:
-         """Handle DELETE /v3/conversations/{conversation_id}/members/{member_id}."""
-         conversation_id = request.get_path_param("conversation_id")
-         member_id = request.get_path_param("member_id")
-         result = await self.handler.on_delete_conversation_member(
-             request.get_claims_identity(),
-             conversation_id,
-             member_id,
-         )
-         return self.serialize_model(result)
-
-     async def send_conversation_history(self, request: HttpRequestProtocol) -> dict:
-         """Handle POST /v3/conversations/{conversation_id}/activities/history."""
-         conversation_id = request.get_path_param("conversation_id")
-         transcript = await self.deserialize_from_body(request, Transcript)
-         result = await self.handler.on_send_conversation_history(
-             request.get_claims_identity(),
-             conversation_id,
-             transcript,
-         )
-         return self.serialize_model(result)
-
-     async def upload_attachment(self, request: HttpRequestProtocol) -> dict:
-         """Handle POST /v3/conversations/{conversation_id}/attachments."""
-         conversation_id = request.get_path_param("conversation_id")
-         attachment_data = await self.deserialize_from_body(request, AttachmentData)
-         result = await self.handler.on_upload_attachment(
-             request.get_claims_identity(),
-             conversation_id,
-             attachment_data,
-         )
-         return self.serialize_model(result)
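
The removed class's docstring says its route logic "can be adapted to different web frameworks (aiohttp, FastAPI, etc.)". Below is a hedged sketch of what such an adaptation might look like for aiohttp. The _AiohttpRequest wrapper, the "claims_identity" request key, and the register_routes helper are illustrative assumptions, not part of the package; only the ChannelServiceRoutes handlers and the HttpRequestProtocol members they call (headers, json(), get_path_param(), get_claims_identity()) come from the 0.7.0 sources shown above.

# Hedged sketch, assuming 0.7.0 where microsoft_agents.hosting.core.http still exists.
from aiohttp import web


class _AiohttpRequest:
    """Adapts an aiohttp request to the members ChannelServiceRoutes expects."""

    def __init__(self, request: web.Request):
        self._request = request
        self.headers = request.headers

    async def json(self):
        return await self._request.json()

    def get_path_param(self, name: str):
        return self._request.match_info[name]

    def get_claims_identity(self):
        # Assumed to be stored on the request earlier by authentication middleware.
        return self._request.get("claims_identity")


def register_routes(app: web.Application, routes: "ChannelServiceRoutes") -> None:
    """Registers one example route; a full adapter would cover every handler."""

    async def send_to_conversation(request: web.Request) -> web.Response:
        result = await routes.send_to_conversation(_AiohttpRequest(request))
        return web.json_response(result)

    app.add_routes(
        [web.post("/v3/conversations/{conversation_id}/activities", send_to_conversation)]
    )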