openlit 1.34.31__py3-none-any.whl → 1.34.32__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- openlit/__init__.py +3 -1
- openlit/instrumentation/ag2/__init__.py +92 -1
- openlit/instrumentation/ag2/ag2.py +425 -4
- openlit/instrumentation/ag2/async_ag2.py +425 -4
- openlit/instrumentation/ag2/utils.py +343 -2
- openlit/semcov/__init__.py +10 -0
- {openlit-1.34.31.dist-info → openlit-1.34.32.dist-info}/METADATA +1 -1
- {openlit-1.34.31.dist-info → openlit-1.34.32.dist-info}/RECORD +10 -10
- {openlit-1.34.31.dist-info → openlit-1.34.32.dist-info}/LICENSE +0 -0
- {openlit-1.34.31.dist-info → openlit-1.34.32.dist-info}/WHEEL +0 -0
openlit/__init__.py
CHANGED
@@ -268,7 +268,9 @@ def init(
             logger.error(
                 "OpenLIT metrics setup failed. Metrics will not be available: %s", err
             )
-
+            # Set metrics_dict to None and disable metrics instead of returning early
+            metrics_dict = None
+            disable_metrics = True
 
     if (
         os.getenv("OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT", "").lower
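A minimal sketch of what this change means for callers (the endpoint and names below are assumptions, not part of the diff): if metrics setup fails, `openlit.init()` now degrades to tracing-only instead of returning early.

```python
# Sketch only: assumes a collector that accepts traces; the endpoint is hypothetical.
import openlit

# Before 1.34.32, a metrics setup failure aborted init(); now metrics_dict is set
# to None, disable_metrics becomes True, and trace instrumentation still proceeds.
openlit.init(
    application_name="my-app",
    environment="dev",
    otlp_endpoint="http://localhost:4318",
)
```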
openlit/instrumentation/ag2/__init__.py
CHANGED
@@ -5,10 +5,21 @@ import importlib.metadata
 from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
 from wrapt import wrap_function_wrapper
 
-from openlit.instrumentation.ag2.ag2 import
+from openlit.instrumentation.ag2.ag2 import (
+    conversable_agent,
+    agent_run,
+    agent_generate_reply,
+    agent_receive,
+    agent_send,
+    groupchat_manager_run_chat,
+    groupchat_select_speaker,
+)
 from openlit.instrumentation.ag2.async_ag2 import (
     async_conversable_agent,
     async_agent_run,
+    async_agent_generate_reply,
+    async_agent_receive,
+    async_agent_send,
 )
 
 _instruments = ("ag2 >= 0.3.2",)
@@ -64,6 +75,86 @@ class AG2Instrumentor(BaseInstrumentor):
             ),
         )
 
+        # sync agent generate_reply
+        wrap_function_wrapper(
+            "autogen.agentchat.conversable_agent",
+            "ConversableAgent.generate_reply",
+            agent_generate_reply(
+                version,
+                environment,
+                application_name,
+                tracer,
+                pricing_info,
+                capture_message_content,
+                metrics,
+                disable_metrics,
+            ),
+        )
+
+        # sync agent receive
+        wrap_function_wrapper(
+            "autogen.agentchat.conversable_agent",
+            "ConversableAgent.receive",
+            agent_receive(
+                version,
+                environment,
+                application_name,
+                tracer,
+                pricing_info,
+                capture_message_content,
+                metrics,
+                disable_metrics,
+            ),
+        )
+
+        # sync agent send
+        wrap_function_wrapper(
+            "autogen.agentchat.conversable_agent",
+            "ConversableAgent.send",
+            agent_send(
+                version,
+                environment,
+                application_name,
+                tracer,
+                pricing_info,
+                capture_message_content,
+                metrics,
+                disable_metrics,
+            ),
+        )
+
+        # sync groupchat manager run_chat
+        wrap_function_wrapper(
+            "autogen.agentchat.groupchat",
+            "GroupChatManager.run_chat",
+            groupchat_manager_run_chat(
+                version,
+                environment,
+                application_name,
+                tracer,
+                pricing_info,
+                capture_message_content,
+                metrics,
+                disable_metrics,
+            ),
+        )
+
+        # sync groupchat select_speaker
+        wrap_function_wrapper(
+            "autogen.agentchat.groupchat",
+            "GroupChat.select_speaker",
+            groupchat_select_speaker(
+                version,
+                environment,
+                application_name,
+                tracer,
+                pricing_info,
+                capture_message_content,
+                metrics,
+                disable_metrics,
+            ),
+        )
+
     def _uninstrument(self, **kwargs):
         # Proper uninstrumentation logic to revert patched methods
         pass
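For orientation, a hedged usage sketch of the newly wrapped methods: once `openlit.init()` registers the instrumentor above, ordinary AG2 calls emit the new spans with no extra wiring. The agent names, model, and message below are placeholders, and an LLM API key is assumed to be configured.

```python
# Sketch only: assumes the ag2/autogen package is installed and an API key is set.
import openlit
from autogen import ConversableAgent

openlit.init(application_name="ag2-demo", environment="dev")

assistant = ConversableAgent("assistant", llm_config={"model": "gpt-4o-mini"})
user = ConversableAgent("user", llm_config=False, human_input_mode="NEVER")

# ConversableAgent.send, .receive and .generate_reply are now wrapped, so this
# exchange produces agent spans in addition to the existing creation/run spans.
user.initiate_chat(assistant, message="Say hello in one short sentence.", max_turns=1)
```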
openlit/instrumentation/ag2/ag2.py
CHANGED
@@ -8,10 +8,45 @@ from openlit.__helpers import handle_exception, set_server_address_and_port
 from openlit.instrumentation.ag2.utils import (
     process_agent_creation,
     process_agent_run,
+    process_agent_generate_reply,
+    process_agent_receive,
+    process_agent_send,
+    process_groupchat_operation,
+    process_speaker_selection,
 )
 from openlit.semcov import SemanticConvention
 
 
+def extract_agent_name(instance, fallback="unknown_agent"):
+    """
+    Extract agent name from AG2 instance with intelligent fallbacks.
+
+    Args:
+        instance: AG2 instance (Agent, GroupChat, etc.)
+        fallback: Default name if no name can be extracted
+
+    Returns:
+        str: Agent name or meaningful fallback
+    """
+    # Try to get the name attribute first
+    agent_name = getattr(instance, "name", None)
+    if agent_name:
+        return agent_name
+
+    # Try to get from class name and make it meaningful
+    class_name = getattr(instance, "__class__", type(instance)).__name__.lower()
+
+    # Map common AG2 class names to meaningful names
+    class_name_map = {
+        "conversableagent": "conversable_agent",
+        "groupchat": "group_chat",
+        "groupchatmanager": "group_chat_manager",
+        "agent": "agent",
+    }
+
+    return class_name_map.get(class_name, fallback)
+
+
 def conversable_agent(
     version,
     environment,
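A quick illustration of the fallback chain in the new `extract_agent_name` helper; the classes below are stand-ins defined only for the example, not real AG2 imports.

```python
# Stand-in classes to show the three outcomes of extract_agent_name.
class NamedAgent:
    name = "researcher"          # direct "name" attribute wins

class GroupChatManager:          # no name attribute; class name is in the mapping
    pass

class Widget:                    # neither a name nor a mapped class
    pass

print(extract_agent_name(NamedAgent()))        # -> "researcher"
print(extract_agent_name(GroupChatManager()))  # -> "group_chat_manager"
print(extract_agent_name(Widget()))            # -> "unknown_agent"
```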
@@ -34,7 +69,7 @@ def conversable_agent(
         server_address, server_port = set_server_address_and_port(
             instance, "127.0.0.1", 80
         )
-        agent_name = kwargs.get("name", "
+        agent_name = kwargs.get("name", "unknown_agent")
         llm_config = kwargs.get("llm_config", {})
         system_message = kwargs.get("system_message", "")
 
@@ -96,12 +131,12 @@ def agent_run(
         )
 
         # Extract agent name from instance
-        agent_name =
+        agent_name = extract_agent_name(instance)
 
         # Extract model from instance llm_config
-        request_model = "
+        request_model = "unknown"
         if hasattr(instance, "llm_config") and isinstance(instance.llm_config, dict):
-            request_model = instance.llm_config.get("model", "
+            request_model = instance.llm_config.get("model", "unknown")
 
         span_name = f"{SemanticConvention.GEN_AI_OPERATION_TYPE_EXECUTE_AGENT_TASK} {agent_name}"
 
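The span name in `agent_run` (and in the wrappers added below) is simply the operation-type constant followed by the resolved agent name. A tiny sketch, with the constant's string value assumed for illustration since the diff does not show it:

```python
# Illustrative only: the real constant lives in openlit.semcov.SemanticConvention
# and its exact string value may differ from the one assumed here.
class SemanticConvention:
    GEN_AI_OPERATION_TYPE_EXECUTE_AGENT_TASK = "execute_task"

agent_name = "assistant"  # e.g. the result of extract_agent_name(instance)
span_name = f"{SemanticConvention.GEN_AI_OPERATION_TYPE_EXECUTE_AGENT_TASK} {agent_name}"
print(span_name)  # -> "execute_task assistant"
```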
@@ -133,3 +168,389 @@ def agent_run(
             return response
 
     return wrapper
+
+
+def agent_generate_reply(
+    version,
+    environment,
+    application_name,
+    tracer,
+    pricing_info,
+    capture_message_content,
+    metrics,
+    disable_metrics,
+):
+    """
+    Generates a telemetry wrapper for AG2 ConversableAgent.generate_reply.
+    """
+
+    def wrapper(wrapped, instance, args, kwargs):
+        """
+        Wraps the AG2 ConversableAgent.generate_reply call.
+        """
+
+        server_address, server_port = set_server_address_and_port(
+            instance, "127.0.0.1", 80
+        )
+
+        # Extract agent name from instance
+        agent_name = extract_agent_name(instance)
+
+        # Extract model from instance llm_config
+        request_model = "unknown"
+        if hasattr(instance, "llm_config") and isinstance(instance.llm_config, dict):
+            request_model = instance.llm_config.get("model", "unknown")
+
+        span_name = f"{SemanticConvention.GEN_AI_OPERATION_TYPE_EXECUTE_AGENT_TASK} {agent_name}"
+
+        with tracer.start_as_current_span(span_name, kind=SpanKind.CLIENT) as span:
+            start_time = time.time()
+            response = wrapped(*args, **kwargs)
+
+            try:
+                response = process_agent_generate_reply(
+                    response=response,
+                    agent_name=agent_name,
+                    request_model=request_model,
+                    messages=args[0] if args else kwargs.get("messages", []),
+                    sender=args[1] if len(args) > 1 else kwargs.get("sender", None),
+                    pricing_info=pricing_info,
+                    server_port=server_port,
+                    server_address=server_address,
+                    environment=environment,
+                    application_name=application_name,
+                    metrics=metrics,
+                    start_time=start_time,
+                    span=span,
+                    capture_message_content=capture_message_content,
+                    disable_metrics=disable_metrics,
+                    version=version,
+                )
+
+            except Exception as e:
+                handle_exception(span, e)
+
+            return response
+
+    return wrapper
+
+
+def agent_receive(
+    version,
+    environment,
+    application_name,
+    tracer,
+    pricing_info,
+    capture_message_content,
+    metrics,
+    disable_metrics,
+):
+    """
+    Generates a telemetry wrapper for AG2 ConversableAgent.receive.
+    """
+
+    def wrapper(wrapped, instance, args, kwargs):
+        """
+        Wraps the AG2 ConversableAgent.receive call.
+        """
+
+        server_address, server_port = set_server_address_and_port(
+            instance, "127.0.0.1", 80
+        )
+
+        # Extract agent name from instance
+        agent_name = extract_agent_name(instance)
+
+        # Extract sender information
+        sender = args[0] if args else kwargs.get("sender", None)
+        sender_name = getattr(sender, "name", "Unknown") if sender else "Unknown"
+
+        # Extract message
+        message = args[1] if len(args) > 1 else kwargs.get("message", "")
+
+        span_name = f"{SemanticConvention.GEN_AI_OPERATION_TYPE_EXECUTE_AGENT_TASK} {agent_name}"
+
+        with tracer.start_as_current_span(span_name, kind=SpanKind.CLIENT) as span:
+            start_time = time.time()
+            response = wrapped(*args, **kwargs)
+
+            try:
+                process_agent_receive(
+                    message=message,
+                    agent_name=agent_name,
+                    sender_name=sender_name,
+                    agent_instance=instance,
+                    pricing_info=pricing_info,
+                    server_port=server_port,
+                    server_address=server_address,
+                    environment=environment,
+                    application_name=application_name,
+                    metrics=metrics,
+                    start_time=start_time,
+                    span=span,
+                    capture_message_content=capture_message_content,
+                    disable_metrics=disable_metrics,
+                    version=version,
+                )
+
+            except Exception as e:
+                handle_exception(span, e)
+
+            return response
+
+    return wrapper
+
+
+def agent_send(
+    version,
+    environment,
+    application_name,
+    tracer,
+    pricing_info,
+    capture_message_content,
+    metrics,
+    disable_metrics,
+):
+    """
+    Generates a telemetry wrapper for AG2 ConversableAgent.send.
+    """
+
+    def wrapper(wrapped, instance, args, kwargs):
+        """
+        Wraps the AG2 ConversableAgent.send call.
+        """
+
+        server_address, server_port = set_server_address_and_port(
+            instance, "127.0.0.1", 80
+        )
+
+        # Extract agent name from instance
+        agent_name = extract_agent_name(instance)
+
+        # Extract recipient information
+        recipient = args[0] if args else kwargs.get("recipient", None)
+        recipient_name = (
+            getattr(recipient, "name", "Unknown") if recipient else "Unknown"
+        )
+
+        # Extract message
+        message = args[1] if len(args) > 1 else kwargs.get("message", "")
+
+        span_name = f"{SemanticConvention.GEN_AI_OPERATION_TYPE_EXECUTE_AGENT_TASK} {agent_name}"
+
+        with tracer.start_as_current_span(span_name, kind=SpanKind.CLIENT) as span:
+            start_time = time.time()
+            response = wrapped(*args, **kwargs)
+
+            try:
+                process_agent_send(
+                    message=message,
+                    agent_name=agent_name,
+                    recipient_name=recipient_name,
+                    agent_instance=instance,
+                    pricing_info=pricing_info,
+                    server_port=server_port,
+                    server_address=server_address,
+                    environment=environment,
+                    application_name=application_name,
+                    metrics=metrics,
+                    start_time=start_time,
+                    span=span,
+                    capture_message_content=capture_message_content,
+                    disable_metrics=disable_metrics,
+                    version=version,
+                )
+
+            except Exception as e:
+                handle_exception(span, e)
+
+            return response
+
+    return wrapper
+
+
+def groupchat_manager_run_chat(
+    version,
+    environment,
+    application_name,
+    tracer,
+    pricing_info,
+    capture_message_content,
+    metrics,
+    disable_metrics,
+):
+    """
+    Generates a telemetry wrapper for AG2 GroupChatManager.run_chat.
+    """
+
+    def wrapper(wrapped, instance, args, kwargs):
+        """
+        Wraps the AG2 GroupChatManager.run_chat call.
+        """
+
+        server_address, server_port = set_server_address_and_port(
+            instance, "127.0.0.1", 80
+        )
+
+        # Extract groupchat information
+        groupchat = getattr(instance, "groupchat", None)
+        if groupchat:
+            participants = [agent.name for agent in groupchat.agents]
+            group_name = f"GroupChat_{len(participants)}_agents"
+        else:
+            participants = []
+            group_name = "UnknownGroupChat"
+
+        # Extract model information from GroupChatManager
+        request_model = "unknown"  # Default fallback
+        if hasattr(instance, "llm_config") and isinstance(instance.llm_config, dict):
+            request_model = instance.llm_config.get("model", "unknown")
+
+        # Try to get more specific model from groupchat
+        if groupchat and hasattr(groupchat, "select_speaker_auto_llm_config"):
+            llm_config = groupchat.select_speaker_auto_llm_config
+            if isinstance(llm_config, dict):
+                request_model = llm_config.get("model", request_model)
+            elif hasattr(llm_config, "model"):
+                request_model = llm_config.model
+
+        # Extract sender information
+        sender = kwargs.get("sender", None)
+
+        # Extract messages
+        messages = args[0] if args else kwargs.get("messages", [])
+
+        span_name = f"{SemanticConvention.GEN_AI_OPERATION_TYPE_FRAMEWORK} {group_name}"
+
+        with tracer.start_as_current_span(span_name, kind=SpanKind.CLIENT) as span:
+            start_time = time.time()
+            response = wrapped(*args, **kwargs)
+
+            try:
+                process_groupchat_operation(
+                    group_name=group_name,
+                    participants=participants,
+                    messages=messages,
+                    sender=sender,
+                    max_turns=None,  # Not available in new API
+                    request_model=request_model,
+                    pricing_info=pricing_info,
+                    server_port=server_port,
+                    server_address=server_address,
+                    environment=environment,
+                    application_name=application_name,
+                    metrics=metrics,
+                    start_time=start_time,
+                    span=span,
+                    capture_message_content=capture_message_content,
+                    disable_metrics=disable_metrics,
+                    version=version,
+                )
+
+            except Exception as e:
+                handle_exception(span, e)
+
+            return response
+
+    return wrapper
+
+
+def groupchat_select_speaker(
+    version,
+    environment,
+    application_name,
+    tracer,
+    pricing_info,
+    capture_message_content,
+    metrics,
+    disable_metrics,
+):
+    """
+    Generates a telemetry wrapper for AG2 GroupChat.select_speaker.
+    """
+
+    def wrapper(wrapped, instance, args, kwargs):
+        """
+        Wraps the AG2 GroupChat.select_speaker call.
+        """
+
+        server_address, server_port = set_server_address_and_port(
+            instance, "127.0.0.1", 80
+        )
+
+        # Extract speaker information
+        last_speaker = args[0] if args else kwargs.get("last_speaker", None)
+        selector = args[1] if len(args) > 1 else kwargs.get("selector", None)
+
+        last_speaker_name = (
+            getattr(last_speaker, "name", "Unknown") if last_speaker else "Unknown"
+        )
+
+        # Extract agents list
+        agents = getattr(instance, "agents", [])
+
+        # Extract model information from GroupChat instance
+        request_model = "unknown"  # Default fallback
+        # Check for speaker selection specific config
+        if hasattr(instance, "select_speaker_auto_llm_config"):
+            llm_config = instance.select_speaker_auto_llm_config
+            if isinstance(llm_config, dict):
+                request_model = llm_config.get("model", "unknown")
+            elif hasattr(llm_config, "model"):
+                request_model = llm_config.model
+
+        # Try to get model from selector if available
+        if (
+            selector
+            and hasattr(selector, "llm_config")
+            and isinstance(selector.llm_config, dict)
+        ):
+            request_model = selector.llm_config.get("model", request_model)
+
+        # Try to get model from agents if still unknown
+        if request_model == "unknown" and agents:
+            for agent in agents:
+                if hasattr(agent, "llm_config") and isinstance(agent.llm_config, dict):
+                    model = agent.llm_config.get("model")
+                    if model:
+                        request_model = model
+                        break
+
+        span_name = (
+            f"{SemanticConvention.GEN_AI_OPERATION_TYPE_AGENT} speaker_selection"
+        )
+
+        with tracer.start_as_current_span(span_name, kind=SpanKind.CLIENT) as span:
+            start_time = time.time()
+            response = wrapped(*args, **kwargs)
+
+            try:
+                selected_speaker_name = (
+                    getattr(response, "name", "Unknown") if response else "Unknown"
+                )
+
+                process_speaker_selection(
+                    last_speaker=last_speaker_name,
+                    selected_speaker=selected_speaker_name,
+                    selector=selector,
+                    agents=agents,
+                    request_model=request_model,
+                    pricing_info=pricing_info,
+                    server_port=server_port,
+                    server_address=server_address,
+                    environment=environment,
+                    application_name=application_name,
+                    metrics=metrics,
+                    start_time=start_time,
+                    span=span,
+                    capture_message_content=capture_message_content,
+                    disable_metrics=disable_metrics,
+                    version=version,
+                )
+
+            except Exception as e:
+                handle_exception(span, e)
+
+            return response
+
+    return wrapper
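To close the loop, a hedged sketch of a group chat that would exercise `groupchat_manager_run_chat` and `groupchat_select_speaker` once OpenLIT is initialized; the agent names, model, and message are placeholders.

```python
# Sketch only: assumes openlit.init() has already run and ag2/autogen is installed.
from autogen import ConversableAgent, GroupChat, GroupChatManager

writer = ConversableAgent("writer", llm_config={"model": "gpt-4o-mini"})
critic = ConversableAgent("critic", llm_config={"model": "gpt-4o-mini"})

group = GroupChat(agents=[writer, critic], messages=[], max_round=4)
manager = GroupChatManager(groupchat=group, llm_config={"model": "gpt-4o-mini"})

# GroupChatManager.run_chat and GroupChat.select_speaker are now wrapped, so a
# framework span named after "GroupChat_2_agents" and speaker_selection spans
# appear alongside the per-agent send/receive/generate_reply spans.
writer.initiate_chat(manager, message="Draft a one-line product tagline.")
```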