praisonaiagents 0.0.145__py3-none-any.whl → 0.0.146__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,573 @@
1
+ """
2
+ User-Friendly Performance Monitoring for PraisonAI
3
+
4
+ This module provides easy-to-use tools for monitoring function performance,
5
+ analyzing execution flow, and tracking API calls. Built on top of the existing
6
+ comprehensive telemetry infrastructure.
7
+
8
+ Features:
9
+ - Function performance evaluation with timing and statistics
10
+ - Function flow analysis and visualization
11
+ - API call tracking and performance monitoring
12
+ - Real-time performance reporting
13
+ - Easy integration with existing agents and workflows
14
+ """
15
+
16
+ import time
17
+ import json
18
+ import threading
19
+ import functools
20
+ from collections import defaultdict, deque
21
+ from typing import Dict, Any, List, Optional, Callable, Union
22
+ from contextlib import contextmanager
23
+ from datetime import datetime
24
+ import logging
25
+
26
+ try:
27
+ from .telemetry import MinimalTelemetry
28
+ TELEMETRY_AVAILABLE = True
29
+ except ImportError:
30
+ TELEMETRY_AVAILABLE = False
31
+
32
+ logger = logging.getLogger(__name__)
33
+
34
+
35
class PerformanceMonitor:
    """
    User-friendly performance monitoring for functions, API calls, and workflows.

    Provides comprehensive performance tracking with simple APIs for:
    - Function execution timing and statistics
    - API call performance monitoring
    - Function flow analysis
    - Real-time performance reporting

    All mutable state is guarded by a reentrant lock, so the decorator, the
    API-call context manager, and the reporting methods may be used from
    multiple threads concurrently.
    """

    def __init__(self, max_entries: int = 10000):
        """
        Initialize the performance monitor.

        Args:
            max_entries: Maximum number of performance (flow) entries to keep
                in memory; older events are discarded oldest-first.
        """
        self.max_entries = max_entries
        # RLock rather than Lock: report helpers call other locked getters.
        self._lock = threading.RLock()

        # Cumulative per-function statistics. defaultdict lazily creates a
        # fresh record the first time a function name is recorded.
        self._function_stats = defaultdict(lambda: {
            'call_count': 0,
            'total_time': 0.0,
            'min_time': float('inf'),  # sentinel until the first call arrives
            'max_time': 0.0,
            'recent_times': deque(maxlen=100),  # rolling window for recent_average
            'error_count': 0,
            'last_called': None
        })

        # Cumulative per-API statistics, analogous to the function records.
        self._api_calls = defaultdict(lambda: {
            'call_count': 0,
            'total_time': 0.0,
            'min_time': float('inf'),
            'max_time': 0.0,
            'success_count': 0,
            'error_count': 0,
            'recent_calls': deque(maxlen=50)  # last 50 calls with outcome details
        })

        # Chronological start/end events, bounded to max_entries.
        self._function_flow = deque(maxlen=self.max_entries)
        # call_id -> metadata for calls currently executing.
        self._active_calls = {}

        # Optional integration with the existing telemetry backend; a failure
        # here is logged at debug level and monitoring continues without it.
        self._telemetry = None
        if TELEMETRY_AVAILABLE:
            try:
                self._telemetry = MinimalTelemetry()
            except Exception as e:
                logger.debug(f"Could not initialize telemetry integration: {e}")

    def monitor_function(self, func_name: Optional[str] = None):
        """
        Decorator to monitor function performance.

        Args:
            func_name: Optional custom name for the function; defaults to
                the decorated callable's "module.qualname".

        Example:
            @performance_monitor.monitor_function("my_critical_function")
            def my_function():
                return "result"
        """
        def decorator(func: Callable) -> Callable:
            name = func_name or f"{func.__module__}.{func.__qualname__}"

            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                start_time = time.time()
                thread_id = threading.get_ident()
                # Unique enough per invocation: name + thread + wall-clock start.
                call_id = f"{name}_{thread_id}_{start_time}"

                with self._lock:
                    # Track the in-flight call and log a 'start' flow event.
                    self._active_calls[call_id] = {
                        'function': name,
                        'start_time': start_time,
                        'thread_id': thread_id
                    }
                    self._function_flow.append({
                        'function': name,
                        'timestamp': datetime.now(),
                        'event': 'start',
                        'thread_id': thread_id
                    })

                # Pre-set the outcome so the finally block is safe even for a
                # BaseException (e.g. KeyboardInterrupt) escaping the call,
                # which the except clause below deliberately does not catch.
                success = False
                error = None
                try:
                    result = func(*args, **kwargs)
                    success = True
                except Exception as e:
                    error = str(e)
                    raise  # re-raise: monitoring must never swallow errors
                finally:
                    execution_time = time.time() - start_time

                    # Update cumulative statistics (and telemetry, if enabled).
                    self._record_function_performance(name, execution_time, success, error)

                    with self._lock:
                        # Clean up the active-call entry and log an 'end' event.
                        self._active_calls.pop(call_id, None)
                        self._function_flow.append({
                            'function': name,
                            'timestamp': datetime.now(),
                            'event': 'end',
                            'duration': execution_time,
                            'success': success,
                            'thread_id': thread_id
                        })

                return result

            return wrapper
        return decorator

    @contextmanager
    def track_api_call(self, api_name: str, endpoint: Optional[str] = None):
        """
        Context manager to track API call performance.

        Args:
            api_name: Name of the API (e.g., "openai", "anthropic")
            endpoint: Optional specific endpoint; recorded as "api:endpoint".

        Example:
            with performance_monitor.track_api_call("openai", "/v1/chat/completions"):
                response = openai_client.chat.completions.create(...)
        """
        call_name = f"{api_name}:{endpoint}" if endpoint else api_name
        start_time = time.time()

        # Pre-set the outcome so the finally block is safe even when a
        # BaseException propagates through the with-body.
        success = False
        error = None
        try:
            yield
            success = True
        except Exception as e:
            error = str(e)
            raise
        finally:
            execution_time = time.time() - start_time
            self._record_api_call(call_name, execution_time, success, error)

    def _record_function_performance(self, func_name: str, execution_time: float,
                                     success: bool, error: Optional[str] = None):
        """Record one completed call into the per-function statistics."""
        with self._lock:
            stats = self._function_stats[func_name]
            stats['call_count'] += 1
            stats['total_time'] += execution_time
            stats['min_time'] = min(stats['min_time'], execution_time)
            stats['max_time'] = max(stats['max_time'], execution_time)
            stats['recent_times'].append(execution_time)
            stats['last_called'] = datetime.now()

            if not success:
                stats['error_count'] += 1

        # Forward to the existing telemetry backend outside the lock so a
        # slow telemetry sink cannot block other monitored threads.
        if self._telemetry:
            self._telemetry.track_tool_usage(
                tool_name=func_name,
                success=success,
                execution_time=execution_time
            )

    def _record_api_call(self, api_name: str, execution_time: float,
                         success: bool, error: Optional[str] = None):
        """Record one completed API call into the per-API statistics."""
        with self._lock:
            stats = self._api_calls[api_name]
            stats['call_count'] += 1
            stats['total_time'] += execution_time
            stats['min_time'] = min(stats['min_time'], execution_time)
            stats['max_time'] = max(stats['max_time'], execution_time)

            if success:
                stats['success_count'] += 1
            else:
                stats['error_count'] += 1

            stats['recent_calls'].append({
                'timestamp': datetime.now(),
                'duration': execution_time,
                'success': success,
                'error': error
            })

    def _summarize_function_stats(self, stats: Dict[str, Any]) -> Dict[str, Any]:
        """Return a JSON-serializable copy of one function record with derived metrics.

        Shared by both branches of get_function_performance (previously the
        same computation was duplicated inline).
        """
        summary = stats.copy()
        # Convert deque to list for JSON serialization.
        summary['recent_times'] = list(summary['recent_times'])

        # Bug fix: 'last_called' was exported as a raw datetime, which made
        # export_data(format="json") raise TypeError. Export an ISO-8601
        # string, consistent with every other getter in this class.
        last_called = summary.get('last_called')
        if hasattr(last_called, 'isoformat'):
            summary['last_called'] = last_called.isoformat()

        # Derived statistics (guarded so an empty record cannot divide by zero).
        if summary['call_count'] > 0:
            summary['average_time'] = summary['total_time'] / summary['call_count']
            summary['success_rate'] = (summary['call_count'] - summary['error_count']) / summary['call_count']
        if summary['recent_times']:
            summary['recent_average'] = sum(summary['recent_times']) / len(summary['recent_times'])

        return summary

    def _summarize_api_stats(self, stats: Dict[str, Any]) -> Dict[str, Any]:
        """Return a JSON-serializable copy of one API record with derived metrics."""
        summary = stats.copy()

        # Convert deque of call records to a list, with ISO timestamps.
        recent_calls = []
        for call in summary['recent_calls']:
            call_copy = call.copy()
            call_copy['timestamp'] = call_copy['timestamp'].isoformat()
            recent_calls.append(call_copy)
        summary['recent_calls'] = recent_calls

        if summary['call_count'] > 0:
            summary['average_time'] = summary['total_time'] / summary['call_count']
            summary['success_rate'] = summary['success_count'] / summary['call_count']
            summary['error_rate'] = summary['error_count'] / summary['call_count']

        return summary

    def get_function_performance(self, func_name: Optional[str] = None) -> Dict[str, Any]:
        """
        Get performance statistics for functions.

        Args:
            func_name: Specific function name, or None for all functions

        Returns:
            Mapping of function name -> statistics (JSON-serializable).
            Empty dict when func_name is given but has never been recorded.
        """
        with self._lock:
            if func_name:
                if func_name not in self._function_stats:
                    return {}
                return {func_name: self._summarize_function_stats(self._function_stats[func_name])}

            return {name: self._summarize_function_stats(stats)
                    for name, stats in self._function_stats.items()}

    def get_api_call_performance(self, api_name: Optional[str] = None) -> Dict[str, Any]:
        """
        Get performance statistics for API calls.

        Args:
            api_name: Specific API name, or None for all APIs

        Returns:
            Mapping of API name -> statistics (JSON-serializable).
            Empty dict when api_name is given but has never been recorded.
        """
        with self._lock:
            if api_name:
                if api_name not in self._api_calls:
                    return {}
                return {api_name: self._summarize_api_stats(self._api_calls[api_name])}

            return {name: self._summarize_api_stats(stats)
                    for name, stats in self._api_calls.items()}

    def get_function_flow(self, last_n: Optional[int] = None) -> List[Dict[str, Any]]:
        """
        Get function execution flow information.

        Args:
            last_n: Number of recent flow events to return, or None for all

        Returns:
            List of flow events showing function execution order and timing,
            with timestamps converted to ISO-8601 strings.
        """
        with self._lock:
            flow = []
            for event in self._function_flow.copy():
                # Copy each event so callers cannot mutate internal state.
                event_copy = event.copy()
                timestamp = event_copy['timestamp']
                if hasattr(timestamp, 'isoformat'):
                    event_copy['timestamp'] = timestamp.isoformat()
                elif not isinstance(timestamp, str):
                    event_copy['timestamp'] = str(timestamp)
                flow.append(event_copy)

            # Truthiness check also neutralizes last_n == 0, for which
            # flow[-0:] would surprisingly return the entire list anyway.
            if last_n:
                return flow[-last_n:]
            return flow

    def get_active_calls(self) -> Dict[str, Any]:
        """Get information about currently executing monitored functions."""
        with self._lock:
            active = {}
            current_time = time.time()

            for call_id, info in self._active_calls.items():
                active[call_id] = {
                    'function': info['function'],
                    'duration': current_time - info['start_time'],
                    'thread_id': info['thread_id'],
                    'started_at': datetime.fromtimestamp(info['start_time']).isoformat()
                }

            return active

    def get_slowest_functions(self, limit: int = 10) -> List[Dict[str, Any]]:
        """Get the slowest performing functions, sorted by average time descending."""
        with self._lock:
            functions = []
            for name, stats in self._function_stats.items():
                if stats['call_count'] > 0:
                    functions.append({
                        'function': name,
                        'average_time': stats['total_time'] / stats['call_count'],
                        'max_time': stats['max_time'],
                        'call_count': stats['call_count']
                    })

            functions.sort(key=lambda x: x['average_time'], reverse=True)
            return functions[:limit]

    def get_slowest_api_calls(self, limit: int = 10) -> List[Dict[str, Any]]:
        """Get the slowest performing API calls, sorted by average time descending."""
        with self._lock:
            apis = []
            for name, stats in self._api_calls.items():
                if stats['call_count'] > 0:
                    apis.append({
                        'api': name,
                        'average_time': stats['total_time'] / stats['call_count'],
                        'max_time': stats['max_time'],
                        'call_count': stats['call_count'],
                        'success_rate': stats['success_count'] / stats['call_count']
                    })

            apis.sort(key=lambda x: x['average_time'], reverse=True)
            return apis[:limit]

    def generate_performance_report(self) -> str:
        """
        Generate a comprehensive performance report.

        Returns:
            Formatted string with performance analysis
        """
        report = []
        report.append("=" * 80)
        report.append("PRAISONAI PERFORMANCE MONITORING REPORT")
        report.append("=" * 80)
        report.append(f"Generated at: {datetime.now().isoformat()}")
        report.append("")

        # Function performance summary
        func_stats = self.get_function_performance()
        if func_stats:
            report.append("📊 FUNCTION PERFORMANCE SUMMARY")
            report.append("-" * 40)
            total_functions = len(func_stats)
            total_calls = sum(stats['call_count'] for stats in func_stats.values())
            total_errors = sum(stats['error_count'] for stats in func_stats.values())

            report.append(f"Total Functions Monitored: {total_functions}")
            report.append(f"Total Function Calls: {total_calls}")
            report.append(f"Total Errors: {total_errors}")
            # Fix: the zero-call branch previously appended a bare "N/A" with
            # the label dropped; keep the label in both branches.
            if total_calls > 0:
                report.append(f"Overall Success Rate: {((total_calls - total_errors) / total_calls * 100):.1f}%")
            else:
                report.append("Overall Success Rate: N/A")
            report.append("")

        # Slowest functions
        slowest = self.get_slowest_functions(5)
        if slowest:
            report.append("🐌 SLOWEST FUNCTIONS (Top 5)")
            for i, func in enumerate(slowest, 1):
                report.append(f"{i}. {func['function']}")
                report.append(f"   Average: {func['average_time']:.3f}s | Max: {func['max_time']:.3f}s | Calls: {func['call_count']}")
            report.append("")

        # API performance summary
        api_stats = self.get_api_call_performance()
        if api_stats:
            report.append("🌐 API CALL PERFORMANCE SUMMARY")
            report.append("-" * 40)
            total_apis = len(api_stats)
            total_api_calls = sum(stats['call_count'] for stats in api_stats.values())
            total_api_errors = sum(stats['error_count'] for stats in api_stats.values())

            report.append(f"Total APIs Monitored: {total_apis}")
            report.append(f"Total API Calls: {total_api_calls}")
            report.append(f"Total API Errors: {total_api_errors}")
            if total_api_calls > 0:
                report.append(f"Overall API Success Rate: {((total_api_calls - total_api_errors) / total_api_calls * 100):.1f}%")
            else:
                report.append("Overall API Success Rate: N/A")
            report.append("")

        # Slowest APIs
        slowest_apis = self.get_slowest_api_calls(5)
        if slowest_apis:
            report.append("🐌 SLOWEST API CALLS (Top 5)")
            for i, api in enumerate(slowest_apis, 1):
                report.append(f"{i}. {api['api']}")
                report.append(f"   Average: {api['average_time']:.3f}s | Max: {api['max_time']:.3f}s | Success Rate: {api['success_rate']*100:.1f}%")
            report.append("")

        # Active calls
        active = self.get_active_calls()
        if active:
            report.append("⚡ CURRENTLY ACTIVE FUNCTION CALLS")
            report.append("-" * 40)
            for _call_id, info in active.items():
                report.append(f"• {info['function']} (running {info['duration']:.1f}s)")
            report.append("")

        # Function flow summary
        flow = self.get_function_flow(10)
        if flow:
            report.append("🔄 RECENT FUNCTION FLOW (Last 10 Events)")
            report.append("-" * 40)
            for event in flow:
                event_type = "🟢 START" if event['event'] == 'start' else "🔴 END"
                duration_info = f" ({event.get('duration', 0):.3f}s)" if event['event'] == 'end' else ""
                report.append(f"{event_type} {event['function']}{duration_info}")
            report.append("")

        report.append("=" * 80)
        return "\n".join(report)

    def clear_statistics(self):
        """Clear all performance statistics, flow events, and active-call records."""
        with self._lock:
            self._function_stats.clear()
            self._api_calls.clear()
            self._function_flow.clear()
            self._active_calls.clear()

    def export_data(self, format: str = "json") -> Union[str, Dict[str, Any]]:
        """
        Export all performance data.

        Args:
            format: Export format ("json" for a JSON string; anything else
                returns the raw dict)

        Returns:
            Performance data in requested format. All timestamps are
            ISO-8601 strings, so the JSON form always serializes cleanly.
        """
        data = {
            'functions': self.get_function_performance(),
            'api_calls': self.get_api_call_performance(),
            'flow': self.get_function_flow(),
            'active_calls': self.get_active_calls(),
            'export_timestamp': datetime.now().isoformat()
        }

        if format == "json":
            return json.dumps(data, indent=2)
        return data
529
+
530
+
531
# Global performance monitor instance
# Shared singleton backing the module-level convenience helpers below.
performance_monitor = PerformanceMonitor()
533
+
534
+
535
+ # Convenience functions for easy access
536
def monitor_function(func_name: Optional[str] = None):
    """Convenience decorator for monitoring function performance.

    Delegates to the shared module-level ``performance_monitor`` instance.
    """
    return performance_monitor.monitor_function(func_name)
539
+
540
+
541
def track_api_call(api_name: str, endpoint: Optional[str] = None):
    """Convenience context manager for tracking API calls.

    Delegates to the shared module-level ``performance_monitor`` instance.
    """
    return performance_monitor.track_api_call(api_name, endpoint)
544
+
545
+
546
def get_performance_report() -> str:
    """Get a comprehensive performance report from the shared monitor."""
    return performance_monitor.generate_performance_report()
549
+
550
+
551
def get_function_stats(func_name: Optional[str] = None) -> Dict[str, Any]:
    """Get function performance statistics from the shared monitor.

    Args:
        func_name: Specific function name, or None for all functions.
    """
    return performance_monitor.get_function_performance(func_name)
554
+
555
+
556
def get_api_stats(api_name: Optional[str] = None) -> Dict[str, Any]:
    """Get API call performance statistics from the shared monitor.

    Args:
        api_name: Specific API name, or None for all APIs.
    """
    return performance_monitor.get_api_call_performance(api_name)
559
+
560
+
561
def get_slowest_functions(limit: int = 10) -> List[Dict[str, Any]]:
    """Get the slowest performing functions (by average time) from the shared monitor."""
    return performance_monitor.get_slowest_functions(limit)
564
+
565
+
566
def get_slowest_apis(limit: int = 10) -> List[Dict[str, Any]]:
    """Get the slowest performing API calls (by average time) from the shared monitor."""
    return performance_monitor.get_slowest_api_calls(limit)
569
+
570
+
571
def clear_performance_data():
    """Clear all performance monitoring data held by the shared monitor."""
    performance_monitor.clear_statistics()