microsoft-agents-hosting-core 0.6.0.dev17__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- microsoft_agents/hosting/core/__init__.py +24 -1
- microsoft_agents/hosting/core/app/agent_application.py +12 -24
- microsoft_agents/hosting/core/app/oauth/_sign_in_state.py +7 -15
- microsoft_agents/hosting/core/app/streaming/__init__.py +14 -0
- microsoft_agents/hosting/core/app/streaming/citation.py +22 -0
- microsoft_agents/hosting/core/app/streaming/citation_util.py +85 -0
- microsoft_agents/hosting/core/app/streaming/streaming_response.py +411 -0
- microsoft_agents/hosting/core/authorization/claims_identity.py +14 -0
- microsoft_agents/hosting/core/channel_service_adapter.py +28 -14
- microsoft_agents/hosting/core/connector/client/connector_client.py +0 -1
- microsoft_agents/hosting/core/connector/client/user_token_client.py +0 -1
- microsoft_agents/hosting/core/http/__init__.py +17 -0
- microsoft_agents/hosting/core/http/_channel_service_routes.py +202 -0
- microsoft_agents/hosting/core/http/_http_adapter_base.py +136 -0
- microsoft_agents/hosting/core/http/_http_request_protocol.py +36 -0
- microsoft_agents/hosting/core/http/_http_response.py +56 -0
- microsoft_agents/hosting/core/rest_channel_service_client_factory.py +2 -2
- microsoft_agents/hosting/core/storage/memory_storage.py +0 -1
- microsoft_agents/hosting/core/storage/storage.py +0 -1
- microsoft_agents/hosting/core/storage/transcript_logger.py +0 -1
- {microsoft_agents_hosting_core-0.6.0.dev17.dist-info → microsoft_agents_hosting_core-0.7.0.dist-info}/METADATA +21 -3
- {microsoft_agents_hosting_core-0.6.0.dev17.dist-info → microsoft_agents_hosting_core-0.7.0.dist-info}/RECORD +25 -16
- {microsoft_agents_hosting_core-0.6.0.dev17.dist-info → microsoft_agents_hosting_core-0.7.0.dist-info}/WHEEL +0 -0
- {microsoft_agents_hosting_core-0.6.0.dev17.dist-info → microsoft_agents_hosting_core-0.7.0.dist-info}/licenses/LICENSE +0 -0
- {microsoft_agents_hosting_core-0.6.0.dev17.dist-info → microsoft_agents_hosting_core-0.7.0.dist-info}/top_level.txt +0 -0
microsoft_agents/hosting/core/app/streaming/streaming_response.py (new file, +411):

```diff
@@ -0,0 +1,411 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import asyncio
+import logging
+from typing import List, Optional, Callable, Literal, TYPE_CHECKING
+
+from microsoft_agents.activity import (
+    Activity,
+    Entity,
+    Attachment,
+    Channels,
+    ClientCitation,
+    DeliveryModes,
+    SensitivityUsageInfo,
+)
+
+if TYPE_CHECKING:
+    from microsoft_agents.hosting.core.turn_context import TurnContext
+
+from .citation import Citation
+from .citation_util import CitationUtil
+
+logger = logging.getLogger(__name__)
+
+
+class StreamingResponse:
+    """
+    A helper class for streaming responses to the client.
+
+    This class is used to send a series of updates to the client in a single response.
+    The expected sequence of calls is:
+
+    `queue_informative_update()`, `queue_text_chunk()`, `queue_text_chunk()`, ..., `end_stream()`.
+
+    Once `end_stream()` is called, the stream is considered ended and no further updates can be sent.
+    """
+
+    def __init__(self, context: "TurnContext"):
+        """
+        Creates a new StreamingResponse instance.
+
+        Args:
+            context: Context for the current turn of conversation with the user.
+        """
+        self._context = context
+        self._sequence_number = 1
+        self._stream_id: Optional[str] = None
+        self._message = ""
+        self._attachments: Optional[List[Attachment]] = None
+        self._ended = False
+        self._cancelled = False
+
+        # Queue for outgoing activities
+        self._queue: List[Callable[[], Activity]] = []
+        self._queue_sync: Optional[asyncio.Task] = None
+        self._chunk_queued = False
+
+        # Powered by AI feature flags
+        self._enable_feedback_loop = False
+        self._feedback_loop_type: Optional[Literal["default", "custom"]] = None
+        self._enable_generated_by_ai_label = False
+        self._citations: Optional[List[ClientCitation]] = []
+        self._sensitivity_label: Optional[SensitivityUsageInfo] = None
+
+        # Channel information
+        self._is_streaming_channel: bool = False
+        self._channel_id: Channels = None
+        self._interval: float = 0.1  # Default interval for sending updates
+        self._set_defaults(context)
+
+    @property
+    def stream_id(self) -> Optional[str]:
+        """
+        Gets the stream ID of the current response.
+        Assigned after the initial update is sent.
+        """
+        return self._stream_id
+
+    @property
+    def citations(self) -> Optional[List[ClientCitation]]:
+        """Gets the citations of the current response."""
+        return self._citations
+
+    @property
+    def updates_sent(self) -> int:
+        """Gets the number of updates sent for the stream."""
+        return self._sequence_number - 1
+
+    def queue_informative_update(self, text: str) -> None:
+        """
+        Queues an informative update to be sent to the client.
+
+        Args:
+            text: Text of the update to send.
+        """
+        if not self._is_streaming_channel:
+            return
+
+        if self._ended:
+            raise RuntimeError("The stream has already ended.")
+
+        # Queue a typing activity
+        def create_activity():
+            activity = Activity(
+                type="typing",
+                text=text,
+                entities=[
+                    Entity(
+                        type="streaminfo",
+                        stream_type="informative",
+                        stream_sequence=self._sequence_number,
+                    )
+                ],
+            )
+            self._sequence_number += 1
+            return activity
+
+        self._queue_activity(create_activity)
+
+    def queue_text_chunk(
+        self, text: str, citations: Optional[List[Citation]] = None
+    ) -> None:
+        """
+        Queues a chunk of partial message text to be sent to the client.
+
+        The text will be sent as quickly as possible to the client.
+        Chunks may be combined before delivery to the client.
+
+        Args:
+            text: Partial text of the message to send.
+            citations: Citations to be included in the message.
+        """
+        if self._cancelled:
+            return
+        if self._ended:
+            raise RuntimeError("The stream has already ended.")
+
+        # Update full message text
+        self._message += text
+
+        # If there are citations, modify the content so that the sources are numbers instead of [doc1], [doc2], etc.
+        self._message = CitationUtil.format_citations_response(self._message)
+
+        # Queue the next chunk
+        self._queue_next_chunk()
+
+    async def end_stream(self) -> None:
+        """
+        Ends the stream by sending the final message to the client.
+        """
+        if self._ended:
+            raise RuntimeError("The stream has already ended.")
+
+        # Queue final message
+        self._ended = True
+        self._queue_next_chunk()
+
+        # Wait for the queue to drain
+        await self.wait_for_queue()
+
+    def set_attachments(self, attachments: List[Attachment]) -> None:
+        """
+        Sets the attachments to attach to the final chunk.
+
+        Args:
+            attachments: List of attachments.
+        """
+        self._attachments = attachments
+
+    def set_sensitivity_label(self, sensitivity_label: SensitivityUsageInfo) -> None:
+        """
+        Sets the sensitivity label to attach to the final chunk.
+
+        Args:
+            sensitivity_label: The sensitivity label.
+        """
+        self._sensitivity_label = sensitivity_label
+
+    def set_citations(self, citations: List[Citation]) -> None:
+        """
+        Sets the citations for the full message.
+
+        Args:
+            citations: Citations to be included in the message.
+        """
+        if citations:
+            if not self._citations:
+                self._citations = []
+
+            curr_pos = len(self._citations)
+
+            for citation in citations:
+                client_citation = ClientCitation(
+                    type="Claim",
+                    position=curr_pos + 1,
+                    appearance={
+                        "type": "DigitalDocument",
+                        "name": citation.title or f"Document #{curr_pos + 1}",
+                        "abstract": CitationUtil.snippet(citation.content, 477),
+                    },
+                )
+                curr_pos += 1
+                self._citations.append(client_citation)
+
+    def set_feedback_loop(self, enable_feedback_loop: bool) -> None:
+        """
+        Sets the Feedback Loop in Teams that allows a user to
+        give thumbs up or down to a response.
+        Default is False.
+
+        Args:
+            enable_feedback_loop: If true, the feedback loop is enabled.
+        """
+        self._enable_feedback_loop = enable_feedback_loop
+
+    def set_feedback_loop_type(
+        self, feedback_loop_type: Literal["default", "custom"]
+    ) -> None:
+        """
+        Sets the type of UI to use for the feedback loop.
+
+        Args:
+            feedback_loop_type: The type of the feedback loop.
+        """
+        self._feedback_loop_type = feedback_loop_type
+
+    def set_generated_by_ai_label(self, enable_generated_by_ai_label: bool) -> None:
+        """
+        Sets the Generated by AI label in Teams.
+        Default is False.
+
+        Args:
+            enable_generated_by_ai_label: If true, the label is added.
+        """
+        self._enable_generated_by_ai_label = enable_generated_by_ai_label
+
+    def get_message(self) -> str:
+        """
+        Returns the most recently streamed message.
+        """
+        return self._message
+
+    async def wait_for_queue(self) -> None:
+        """
+        Waits for the outgoing activity queue to be empty.
+        """
+        if self._queue_sync:
+            await self._queue_sync
+
+    def _set_defaults(self, context: "TurnContext"):
+        if Channels.ms_teams == context.activity.channel_id.channel:
+            self._is_streaming_channel = True
+            self._interval = 1.0
+        elif Channels.direct_line == context.activity.channel_id.channel:
+            self._is_streaming_channel = True
+            self._interval = 0.5
+        elif context.activity.delivery_mode == DeliveryModes.stream:
+            self._is_streaming_channel = True
+            self._interval = 0.1
+
+        self._channel_id = context.activity.channel_id
+
+    def _queue_next_chunk(self) -> None:
+        """
+        Queues the next chunk of text to be sent to the client.
+        """
+        # Are we already waiting to send a chunk?
+        if self._chunk_queued:
+            return
+
+        # Queue a chunk of text to be sent
+        self._chunk_queued = True
+
+        def create_activity():
+            self._chunk_queued = False
+            if self._ended:
+                # Send final message
+                activity = Activity(
+                    type="message",
+                    text=self._message or "end stream response",
+                    attachments=self._attachments or [],
+                    entities=[
+                        Entity(
+                            type="streaminfo",
+                            stream_id=self._stream_id,
+                            stream_type="final",
+                            stream_sequence=self._sequence_number,
+                        )
+                    ],
+                )
+            elif self._is_streaming_channel:
+                # Send typing activity
+                activity = Activity(
+                    type="typing",
+                    text=self._message,
+                    entities=[
+                        Entity(
+                            type="streaminfo",
+                            stream_type="streaming",
+                            stream_sequence=self._sequence_number,
+                        )
+                    ],
+                )
+            else:
+                return
+            self._sequence_number += 1
+            return activity
+
+        self._queue_activity(create_activity)
+
+    def _queue_activity(self, factory: Callable[[], Activity]) -> None:
+        """
+        Queues an activity to be sent to the client.
+        """
+        self._queue.append(factory)
+
+        # If there's no sync in progress, start one
+        if not self._queue_sync:
+            self._queue_sync = asyncio.create_task(self._drain_queue())
+
+    async def _drain_queue(self) -> None:
+        """
+        Sends any queued activities to the client until the queue is empty.
+        """
+        try:
+            logger.debug(f"Draining queue with {len(self._queue)} activities.")
+            while self._queue:
+                factory = self._queue.pop(0)
+                activity = factory()
+                if activity:
+                    await self._send_activity(activity)
+        except Exception as err:
+            if (
+                "403" in str(err)
+                and self._context.activity.channel_id == Channels.ms_teams
+            ):
+                logger.warning("Teams channel stopped the stream.")
+                self._cancelled = True
+            else:
+                logger.error(
+                    f"Error occurred when sending activity while streaming: {err}"
+                )
+                raise
+        finally:
+            self._queue_sync = None
+
+    async def _send_activity(self, activity: Activity) -> None:
+        """
+        Sends an activity to the client and saves the stream ID returned.
+
+        Args:
+            activity: The activity to send.
+        """
+
+        streaminfo_entity = None
+
+        if not activity.entities:
+            streaminfo_entity = Entity(type="streaminfo")
+            activity.entities = [streaminfo_entity]
+        else:
+            for entity in activity.entities:
+                if hasattr(entity, "type") and entity.type == "streaminfo":
+                    streaminfo_entity = entity
+                    break
+
+            if not streaminfo_entity:
+                # If no streaminfo entity exists, create one
+                streaminfo_entity = Entity(type="streaminfo")
+                activity.entities.append(streaminfo_entity)
+
+        # Set activity ID to the assigned stream ID
+        if self._stream_id:
+            activity.id = self._stream_id
+            streaminfo_entity.stream_id = self._stream_id
+
+        if self._citations and len(self._citations) > 0 and not self._ended:
+            # Filter out the citations unused in content.
+            curr_citations = CitationUtil.get_used_citations(
+                self._message, self._citations
+            )
+            if curr_citations:
+                activity.entities.append(
+                    Entity(
+                        type="https://schema.org/Message",
+                        schema_type="Message",
+                        context="https://schema.org",
+                        id="",
+                        citation=curr_citations,
+                    )
+                )
+
+        # Add in Powered by AI feature flags
+        if self._ended:
+            if self._enable_feedback_loop and self._feedback_loop_type:
+                # Add feedback loop to streaminfo entity
+                streaminfo_entity.feedback_loop = {"type": self._feedback_loop_type}
+            else:
+                # Add feedback loop enabled to streaminfo entity
+                streaminfo_entity.feedback_loop_enabled = self._enable_feedback_loop
+            # Add in Generated by AI
+            if self._enable_generated_by_ai_label:
+                activity.add_ai_metadata(self._citations, self._sensitivity_label)
+
+        # Send activity
+        response = await self._context.send_activity(activity)
+        await asyncio.sleep(self._interval)
+
+        # Save assigned stream ID
+        if not self._stream_id and response:
+            self._stream_id = response.id
```
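
For orientation, here is a minimal usage sketch of the new `StreamingResponse` helper. It is not part of the diff; the handler shape and the `token_stream` async iterator are assumptions for illustration, and the import targets the module added above.

```python
# Hypothetical usage sketch (not part of the package): drive StreamingResponse
# from a turn handler, assuming `context` is the current TurnContext and
# `token_stream` is an async iterator of partial text from a model.
from microsoft_agents.hosting.core.app.streaming.streaming_response import (
    StreamingResponse,
)


async def stream_reply(context, token_stream):
    response = StreamingResponse(context)
    response.set_generated_by_ai_label(True)

    # Informative "typing" update; silently skipped on non-streaming channels.
    response.queue_informative_update("Working on it...")

    async for chunk in token_stream:
        # Each chunk is appended to the running message and queued for delivery.
        response.queue_text_chunk(chunk)

    # Sends the final "message" activity and waits for the queue to drain.
    await response.end_stream()
```

On channels where streaming is not enabled, the informative and intermediate updates are dropped and only the final message activity is sent.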
microsoft_agents/hosting/core/authorization/claims_identity.py (+14):

```diff
@@ -83,3 +83,17 @@ class ClaimsIdentity:
             if self.is_agent_claim()
             else AuthenticationConstants.AGENTS_SDK_SCOPE
         )
+
+    def get_token_scope(self) -> list[str]:
+        """
+        Gets the token scope from current claims.
+
+        :return: The token scope.
+        """
+        return [
+            (
+                f"{self.get_outgoing_app_id()}/.default"
+                if self.is_agent_claim()
+                else AuthenticationConstants.AGENTS_SDK_SCOPE + "/.default"
+            )
+        ]
```
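
`get_token_scope()` centralizes the scope selection that `process_activity` previously built inline (see the `channel_service_adapter.py` hunks below). A rough sketch of how the two claim shapes map to scopes, assuming an existing `ClaimsIdentity` instance:

```python
# Illustration only (not from the diff): what the new helper returns for the
# two claim shapes, given an existing ClaimsIdentity instance.
def audience_and_scopes(claims_identity):
    # get_token_audience() already existed; get_token_scope() is new in 0.7.0.
    audience = claims_identity.get_token_audience()
    scopes = claims_identity.get_token_scope()
    # agent-to-agent claim -> ["<outgoing app id>/.default"]
    # otherwise            -> [AuthenticationConstants.AGENTS_SDK_SCOPE + "/.default"]
    return audience, scopes
```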
microsoft_agents/hosting/core/channel_service_adapter.py (+28 -14):

```diff
@@ -341,6 +341,19 @@ class ChannelServiceAdapter(ChannelAdapter, ABC):
         await connector_client.close()
         await user_token_client.close()
 
+    def _resolve_if_connector_client_is_needed(self, activity: Activity) -> bool:
+        """Determine if a connector client is needed based on the activity's delivery mode and service URL.
+
+        :param activity: The activity to evaluate.
+        :type activity: :class:`microsoft_agents.activity.Activity`
+        """
+        if activity.delivery_mode in [
+            DeliveryModes.expect_replies,
+            DeliveryModes.stream,
+        ]:
+            return False
+        return True
+
     async def process_activity(
         self,
         claims_identity: ClaimsIdentity,
@@ -368,16 +381,14 @@ class ChannelServiceAdapter(ChannelAdapter, ABC):
         If the task completes successfully, then an :class:`microsoft_agents.activity.InvokeResponse` is returned;
         otherwise, `None` is returned.
         """
-        scopes: list[str] =
+        scopes: list[str] = claims_identity.get_token_scope()
         outgoing_audience: str = None
 
         if claims_identity.is_agent_claim():
             outgoing_audience = claims_identity.get_token_audience()
-            scopes = [f"{claims_identity.get_outgoing_app_id()}/.default"]
             activity.caller_id = f"{CallerIdConstants.agent_to_agent_prefix}{claims_identity.get_outgoing_app_id()}"
         else:
             outgoing_audience = AuthenticationConstants.AGENTS_SDK_SCOPE
-            scopes = [f"{AuthenticationConstants.AGENTS_SDK_SCOPE}/.default"]
 
         use_anonymous_auth_callback = False
         if (
@@ -403,21 +414,24 @@ class ChannelServiceAdapter(ChannelAdapter, ABC):
         context.turn_state[self.USER_TOKEN_CLIENT_KEY] = user_token_client
 
         # Create the connector client to use for outbound requests.
-        connector_client: ConnectorClient =
-
-
-
-
-
-
-
+        connector_client: Optional[ConnectorClient] = None
+        if self._resolve_if_connector_client_is_needed(activity):
+            connector_client = (
+                await self._channel_service_client_factory.create_connector_client(
+                    context,
+                    claims_identity,
+                    activity.service_url,
+                    outgoing_audience,
+                    scopes,
+                    use_anonymous_auth_callback,
+                )
             )
-
-        context.turn_state[self._AGENT_CONNECTOR_CLIENT_KEY] = connector_client
+        context.turn_state[self._AGENT_CONNECTOR_CLIENT_KEY] = connector_client
 
         await self.run_pipeline(context, callback)
 
-
+        if connector_client:
+            await connector_client.close()
         await user_token_client.close()
 
         # If there are any results they will have been left on the TurnContext.
```
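
Taken together, these hunks change the adapter lifecycle: activities delivered with `expect_replies` or `stream` no longer trigger `ConnectorClient` creation, and `close()` is only called when a client was actually created. A small illustration using only types that appear in this diff (not from the package sources):

```python
# Illustration only: delivery modes that skip ConnectorClient creation in
# process_activity. Activity and DeliveryModes come from microsoft_agents.activity.
from microsoft_agents.activity import Activity, DeliveryModes

inline = Activity(type="message", delivery_mode=DeliveryModes.stream)
outbound = Activity(type="message")  # no delivery mode set

# _resolve_if_connector_client_is_needed(inline)   -> False: replies are returned
#                                                     inline, no client is created
# _resolve_if_connector_client_is_needed(outbound) -> True: a client is created
#                                                     and closed after run_pipeline
```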
microsoft_agents/hosting/core/http/__init__.py (new file, +17):

```diff
@@ -0,0 +1,17 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+"""HTTP abstractions for framework-agnostic adapter implementations."""
+
+from ._http_request_protocol import HttpRequestProtocol
+from ._http_response import HttpResponse, HttpResponseFactory
+from ._http_adapter_base import HttpAdapterBase
+from ._channel_service_routes import ChannelServiceRoutes
+
+__all__ = [
+    "HttpRequestProtocol",
+    "HttpResponse",
+    "HttpResponseFactory",
+    "HttpAdapterBase",
+    "ChannelServiceRoutes",
+]
```