agnt5-0.1.0-cp39-abi3-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. agnt5/__init__.py +307 -0
  2. agnt5/__pycache__/__init__.cpython-311.pyc +0 -0
  3. agnt5/__pycache__/agent.cpython-311.pyc +0 -0
  4. agnt5/__pycache__/context.cpython-311.pyc +0 -0
  5. agnt5/__pycache__/durable.cpython-311.pyc +0 -0
  6. agnt5/__pycache__/extraction.cpython-311.pyc +0 -0
  7. agnt5/__pycache__/memory.cpython-311.pyc +0 -0
  8. agnt5/__pycache__/reflection.cpython-311.pyc +0 -0
  9. agnt5/__pycache__/runtime.cpython-311.pyc +0 -0
  10. agnt5/__pycache__/task.cpython-311.pyc +0 -0
  11. agnt5/__pycache__/tool.cpython-311.pyc +0 -0
  12. agnt5/__pycache__/tracing.cpython-311.pyc +0 -0
  13. agnt5/__pycache__/types.cpython-311.pyc +0 -0
  14. agnt5/__pycache__/workflow.cpython-311.pyc +0 -0
  15. agnt5/_core.abi3.so +0 -0
  16. agnt5/agent.py +1086 -0
  17. agnt5/context.py +406 -0
  18. agnt5/durable.py +1050 -0
  19. agnt5/extraction.py +410 -0
  20. agnt5/llm/__init__.py +179 -0
  21. agnt5/llm/__pycache__/__init__.cpython-311.pyc +0 -0
  22. agnt5/llm/__pycache__/anthropic.cpython-311.pyc +0 -0
  23. agnt5/llm/__pycache__/azure.cpython-311.pyc +0 -0
  24. agnt5/llm/__pycache__/base.cpython-311.pyc +0 -0
  25. agnt5/llm/__pycache__/google.cpython-311.pyc +0 -0
  26. agnt5/llm/__pycache__/mistral.cpython-311.pyc +0 -0
  27. agnt5/llm/__pycache__/openai.cpython-311.pyc +0 -0
  28. agnt5/llm/__pycache__/together.cpython-311.pyc +0 -0
  29. agnt5/llm/anthropic.py +319 -0
  30. agnt5/llm/azure.py +348 -0
  31. agnt5/llm/base.py +315 -0
  32. agnt5/llm/google.py +373 -0
  33. agnt5/llm/mistral.py +330 -0
  34. agnt5/llm/model_registry.py +467 -0
  35. agnt5/llm/models.json +227 -0
  36. agnt5/llm/openai.py +334 -0
  37. agnt5/llm/together.py +377 -0
  38. agnt5/memory.py +746 -0
  39. agnt5/reflection.py +514 -0
  40. agnt5/runtime.py +699 -0
  41. agnt5/task.py +476 -0
  42. agnt5/testing.py +451 -0
  43. agnt5/tool.py +516 -0
  44. agnt5/tracing.py +624 -0
  45. agnt5/types.py +210 -0
  46. agnt5/workflow.py +897 -0
  47. agnt5-0.1.0.dist-info/METADATA +93 -0
  48. agnt5-0.1.0.dist-info/RECORD +49 -0
  49. agnt5-0.1.0.dist-info/WHEEL +4 -0
agnt5/tracing.py ADDED
@@ -0,0 +1,624 @@
+ """
+ OpenAI-style tracing API for AGNT5 Python SDK.
+
+ This module provides a comprehensive tracing system inspired by OpenAI's approach,
+ with zero external dependencies while supporting full integration with the
+ Rust SDK-Core through FFI bridge calls.
+
+ Design Principles:
+ - Zero Dependencies: No OpenTelemetry imports in Python SDK
+ - Simple API: Easy-to-use functions for spans, logs, and metrics
+ - SDK-Core Communication: Send telemetry data via existing FFI bridge
+ - Backward Compatible: Non-breaking addition to existing APIs
+ """
+
+ import os
+ import time
+ import uuid
+ import contextvars
+ import inspect
+ from typing import Dict, Optional, Any, Union, List, Callable
+ from contextlib import contextmanager
+ from dataclasses import dataclass, field
+ from enum import Enum
+ import threading
+ import weakref
+
+
+ def _get_caller_info(skip_frames: int = 2) -> Dict[str, Any]:
+     """Get file and line information for the caller"""
+     try:
+         frame = inspect.currentframe()
+         for _ in range(skip_frames):
+             if frame is not None:
+                 frame = frame.f_back
+
+         if frame is not None:
+             filename = frame.f_code.co_filename
+             lineno = frame.f_lineno
+             func_name = frame.f_code.co_name
+
+             # Get relative path for cleaner output
+             if "/" in filename:
+                 short_filename = "/".join(filename.split("/")[-2:]) # last 2 parts
+             else:
+                 short_filename = filename
+
+             return {
+                 "code.filepath": short_filename,
+                 "code.lineno": lineno,
+                 "code.function": func_name,
+             }
+     except Exception:
+         # Silently ignore any issues with frame inspection
+         pass
+
+     return {}
+
+
+ class TraceLevel(Enum):
+     """Trace logging levels following OpenTelemetry conventions"""
+     DEBUG = "debug"
+     INFO = "info"
+     WARN = "warn"
+     ERROR = "error"
+
+
+ @dataclass
+ class Span:
+     """Lightweight span representation following OpenAI patterns"""
+     id: str = field(default_factory=lambda: str(uuid.uuid4()))
+     name: str = ""
+     parent_id: Optional[str] = None
+     start_time: float = field(default_factory=time.time)
+     end_time: Optional[float] = None
+     attributes: Dict[str, Any] = field(default_factory=dict)
+     events: List[Dict[str, Any]] = field(default_factory=list)
+     status: str = "ok"
+     error: Optional[str] = None
+
+     def set_attribute(self, key: str, value: Any):
+         """Set span attribute"""
+         self.attributes[key] = value
+
+     def add_event(self, name: str, attributes: Optional[Dict[str, Any]] = None):
+         """Add event to span"""
+         self.events.append({
+             "name": name,
+             "timestamp": time.time(),
+             "attributes": attributes or {}
+         })
+
+     def set_error(self, error: Exception):
+         """Mark span as error"""
+         self.status = "error"
+         self.error = str(error)
+         self.attributes["error.type"] = type(error).__name__
+         self.attributes["error.message"] = str(error)
+
+     def finish(self):
+         """Finish the span"""
+         if self.end_time is None:
+             self.end_time = time.time()
+
+     @property
+     def duration_ms(self) -> float:
+         """Get span duration in milliseconds"""
+         if self.end_time and self.start_time:
+             return (self.end_time - self.start_time) * 1000
+         return 0.0
+
+
+ @dataclass
+ class Trace:
+     """Trace containing multiple spans"""
+     id: str = field(default_factory=lambda: str(uuid.uuid4()))
+     spans: List[Span] = field(default_factory=list)
+     metadata: Dict[str, Any] = field(default_factory=dict)
+     start_time: float = field(default_factory=time.time)
+     end_time: Optional[float] = None
+
+     def add_span(self, span: Span):
+         """Add span to trace"""
+         self.spans.append(span)
+
+     def finish(self):
+         """Finish the trace"""
+         if self.end_time is None:
+             self.end_time = time.time()
+
+
+ # Context variables for thread-safe trace management (like OpenAI does)
+ _current_trace: contextvars.ContextVar[Optional[Trace]] = contextvars.ContextVar('current_trace', default=None)
+ _current_span: contextvars.ContextVar[Optional[Span]] = contextvars.ContextVar('current_span', default=None)
+
+
+ class TraceProcessor:
+     """Base class for trace processors - follows OpenAI's pluggable architecture"""
+
+     def process_span(self, span: Span, trace: Trace):
+         """Process a finished span"""
+         pass
+
+     def process_trace(self, trace: Trace):
+         """Process a finished trace"""
+         pass
+
+
+ class SdkCoreTraceProcessor(TraceProcessor):
+     """Processor that sends traces to SDK-Core via FFI"""
+
+     def __init__(self):
+         self._bridge = None # Will be set when FFI bridge is available
+
+     def set_bridge(self, bridge):
+         """Set the FFI bridge for communication with SDK-Core"""
+         self._bridge = bridge
+
+     def process_span(self, span: Span, trace: Trace):
+         """Send span to SDK-Core"""
+         if self._bridge:
+             self._send_span_to_core(span, trace)
+
+     def process_trace(self, trace: Trace):
+         """Send complete trace to SDK-Core"""
+         if self._bridge:
+             self._send_trace_to_core(trace)
+
+     def _send_span_to_core(self, span: Span, trace: Trace):
+         """FFI call to send span data to SDK-Core"""
+         span_data = {
+             "span_id": span.id,
+             "trace_id": trace.id,
+             "parent_id": span.parent_id,
+             "name": span.name,
+             "start_time": span.start_time,
+             "end_time": span.end_time,
+             "duration_ms": span.duration_ms,
+             "attributes": span.attributes,
+             "events": span.events,
+             "status": span.status,
+             "error": span.error,
+         }
+
+         try:
+             # This would call SDK-Core FFI when bridge is available
+             if hasattr(self._bridge, 'send_span'):
+                 self._bridge.send_span(span_data)
+         except Exception as e:
+             # Log error but don't fail - telemetry should never break the application
+             print(f"Warning: Failed to send span to SDK-Core: {e}")
+
+     def _send_trace_to_core(self, trace: Trace):
+         """FFI call to send complete trace to SDK-Core"""
+         trace_data = {
+             "trace_id": trace.id,
+             "metadata": trace.metadata,
+             "start_time": trace.start_time,
+             "end_time": trace.end_time,
+             "span_count": len(trace.spans),
+         }
+
+         try:
+             # This would call SDK-Core FFI when bridge is available
+             if hasattr(self._bridge, 'send_trace'):
+                 self._bridge.send_trace(trace_data)
+         except Exception as e:
+             # Log error but don't fail - telemetry should never break the application
+             print(f"Warning: Failed to send trace to SDK-Core: {e}")
+
+
+ class LoggingTraceProcessor(TraceProcessor):
+     """Processor that logs traces for debugging"""
+
+     def process_span(self, span: Span, trace: Trace):
+         """Log span completion"""
+         # Extract file and line info from attributes
+         file_info = ""
+         if "code.filepath" in span.attributes and "code.lineno" in span.attributes:
+             file_info = f" [{span.attributes['code.filepath']}:{span.attributes['code.lineno']}]"
+
+         if span.status == "error":
+             print(f"ERROR: Span {span.name} failed: {span.error}{file_info}")
+         else:
+             print(f"DEBUG: Span {span.name} completed in {span.duration_ms:.2f}ms{file_info}")
+
+     def process_trace(self, trace: Trace):
+         """Log trace completion"""
+         duration = (trace.end_time - trace.start_time) * 1000 if trace.end_time else 0
+         print(f"DEBUG: Trace {trace.id} completed with {len(trace.spans)} spans in {duration:.2f}ms")
+
+
+ class TracingConfig:
+     """Configuration for tracing system - inspired by OpenAI's approach"""
+
+     def __init__(self):
+         self.enabled = os.getenv("AGNT5_TRACING_ENABLED", "true").lower() == "true"
+         self.level = TraceLevel(os.getenv("AGNT5_TRACING_LEVEL", "info"))
+         self.sample_rate = float(os.getenv("AGNT5_TRACING_SAMPLE_RATE", "1.0"))
+         self.capture_inputs = os.getenv("AGNT5_TRACING_CAPTURE_INPUTS", "true").lower() == "true"
+         self.capture_outputs = os.getenv("AGNT5_TRACING_CAPTURE_OUTPUTS", "true").lower() == "true"
+         self.max_attribute_length = int(os.getenv("AGNT5_TRACING_MAX_ATTRIBUTE_LENGTH", "1000"))
+         self.debug_logging = os.getenv("AGNT5_TRACING_DEBUG", "false").lower() == "true"
+
+         # Initialize processors
+         self.processors: List[TraceProcessor] = []
+         self._setup_default_processors()
+
+     def _setup_default_processors(self):
+         """Setup default trace processors"""
+         # Always add SDK-Core processor for production telemetry
+         self.processors.append(SdkCoreTraceProcessor())
+
+         # Add logging processor for debugging if enabled
+         if self.debug_logging:
+             self.processors.append(LoggingTraceProcessor())
+
+     def should_sample(self) -> bool:
+         """Determine if current operation should be sampled"""
+         if not self.enabled:
+             return False
+         if self.sample_rate >= 1.0:
+             return True
+         if self.sample_rate <= 0.0:
+             return False
+         return time.time() % 1.0 < self.sample_rate
+
+
+ # Global config instance
+ _config = TracingConfig()
+
+
+ class TracingContext:
+     """Main tracing context manager - similar to OpenAI's design"""
+
+     @classmethod
+     def set_processors(cls, processors: List[TraceProcessor]):
+         """Set trace processors"""
+         _config.processors = processors
+
+     @classmethod
+     def add_processor(cls, processor: TraceProcessor):
+         """Add a trace processor"""
+         _config.processors.append(processor)
+
+     @classmethod
+     def configure(cls, **kwargs):
+         """Configure tracing settings"""
+         for key, value in kwargs.items():
+             if hasattr(_config, key):
+                 setattr(_config, key, value)
+
+     @classmethod
+     def get_config(cls) -> TracingConfig:
+         """Get current tracing configuration"""
+         return _config
+
+     @classmethod
+     def set_bridge(cls, bridge):
+         """Set FFI bridge for SDK-Core communication"""
+         for processor in _config.processors:
+             if isinstance(processor, SdkCoreTraceProcessor):
+                 processor.set_bridge(bridge)
+
+
+ # Core tracing functions following OpenAI patterns
+ def start_trace(name: str, metadata: Optional[Dict[str, Any]] = None) -> Trace:
+     """Start a new trace"""
+     if not _config.should_sample():
+         return Trace() # Return dummy trace
+
+     trace = Trace(metadata=metadata or {})
+     trace.metadata["name"] = name
+     trace.metadata["service"] = "agnt5-python-sdk"
+     trace.metadata["component"] = "python-sdk"
+     _current_trace.set(trace)
+     return trace
+
+
+ def end_trace() -> Optional[Trace]:
+     """End current trace"""
+     trace = _current_trace.get()
+     if trace:
+         trace.finish()
+         # Process with all processors
+         for processor in _config.processors:
+             try:
+                 processor.process_trace(trace)
+             except Exception as e:
+                 if _config.debug_logging:
+                     print(f"Warning: Trace processor failed: {e}")
+         _current_trace.set(None)
+     return trace
+
+
+ @contextmanager
+ def trace(name: str, metadata: Optional[Dict[str, Any]] = None):
+     """Context manager for traces - OpenAI style"""
+     trace_obj = start_trace(name, metadata)
+     try:
+         yield trace_obj
+     finally:
+         end_trace()
+
+
+ def start_span(name: str, attributes: Optional[Dict[str, Any]] = None) -> Span:
+     """Start a new span"""
+     if not _config.should_sample():
+         return Span() # Return dummy span
+
+     current_span = _current_span.get()
+     parent_id = current_span.id if current_span else None
+
+     # Get caller information for file and line number
+     caller_info = _get_caller_info(skip_frames=2)
+
+     # Combine caller info with provided attributes
+     safe_attributes = caller_info.copy()
+     if attributes:
+         for key, value in attributes.items():
+             str_value = str(value)
+             if len(str_value) > _config.max_attribute_length:
+                 str_value = str_value[:_config.max_attribute_length] + "..."
+             safe_attributes[key] = str_value
+
+     span = Span(name=name, parent_id=parent_id, attributes=safe_attributes)
+
+     # Add to current trace
+     current_trace = _current_trace.get()
+     if current_trace:
+         current_trace.add_span(span)
+
+     _current_span.set(span)
+     return span
+
+
+ def end_span() -> Optional[Span]:
+     """End current span"""
+     span = _current_span.get()
+     if span:
+         span.finish()
+
+         # Process with all processors
+         current_trace = _current_trace.get()
+         for processor in _config.processors:
+             try:
+                 processor.process_span(span, current_trace)
+             except Exception as e:
+                 if _config.debug_logging:
+                     print(f"Warning: Span processor failed: {e}")
+
+         # Restore parent span
+         if current_trace:
+             parent_span = None
+             for s in reversed(current_trace.spans):
+                 if s.id == span.parent_id:
+                     parent_span = s
+                     break
+             _current_span.set(parent_span)
+         else:
+             _current_span.set(None)
+
+     return span
+
+
+ @contextmanager
+ def span(name: str, attributes: Optional[Dict[str, Any]] = None):
+     """Context manager for spans - OpenAI style"""
+     span_obj = start_span(name, attributes)
+     try:
+         yield span_obj
+     except Exception as e:
+         span_obj.set_error(e)
+         raise
+     finally:
+         end_span()
+
+
+ # Convenience functions
+ def get_current_span() -> Optional[Span]:
+     """Get current active span"""
+     return _current_span.get()
+
+
+ def get_current_trace() -> Optional[Trace]:
+     """Get current active trace"""
+     return _current_trace.get()
+
+
+ def set_span_attribute(key: str, value: Any):
+     """Set attribute on current span"""
+     current_span = _current_span.get()
+     if current_span:
+         current_span.set_attribute(key, value)
+
+
+ def add_span_event(name: str, attributes: Optional[Dict[str, Any]] = None):
+     """Add event to current span"""
+     current_span = _current_span.get()
+     if current_span:
+         current_span.add_event(name, attributes)
+
+
+ def log(level: TraceLevel, message: str, **attributes):
+     """Log message with current span context"""
+     current_span = _current_span.get()
+     if current_span:
+         # Get caller information for file and line number
+         caller_info = _get_caller_info(skip_frames=2)
+
+         current_span.add_event(f"log.{level.value}", {
+             "message": message,
+             **caller_info,
+             **attributes
+         })
+
+
+ # Convenience logging functions that include file/line info
+ def debug(message: str, **attributes):
+     """Debug log with file and line information"""
+     caller_info = _get_caller_info(skip_frames=2)
+     if _config.debug_logging:
+         file_info = f" [{caller_info.get('code.filepath', 'unknown')}:{caller_info.get('code.lineno', '?')}]"
+         print(f"DEBUG: {message}{file_info}")
+     log(TraceLevel.DEBUG, message, **attributes)
+
+
+ def info(message: str, **attributes):
+     """Info log with file and line information"""
+     caller_info = _get_caller_info(skip_frames=2)
+     if _config.debug_logging:
+         file_info = f" [{caller_info.get('code.filepath', 'unknown')}:{caller_info.get('code.lineno', '?')}]"
+         print(f"INFO: {message}{file_info}")
+     log(TraceLevel.INFO, message, **attributes)
+
+
+ def warn(message: str, **attributes):
+     """Warning log with file and line information"""
+     caller_info = _get_caller_info(skip_frames=2)
+     if _config.debug_logging:
+         file_info = f" [{caller_info.get('code.filepath', 'unknown')}:{caller_info.get('code.lineno', '?')}]"
+         print(f"WARN: {message}{file_info}")
+     log(TraceLevel.WARN, message, **attributes)
+
+
+ def error(message: str, **attributes):
+     """Error log with file and line information"""
+     caller_info = _get_caller_info(skip_frames=2)
+     file_info = f" [{caller_info.get('code.filepath', 'unknown')}:{caller_info.get('code.lineno', '?')}]"
+     print(f"ERROR: {message}{file_info}")
+     log(TraceLevel.ERROR, message, **attributes)
+
+
+ # Decorators for automatic tracing
+ def traced(name: Optional[str] = None, capture_args: bool = False, capture_result: bool = False):
+     """Decorator for automatic function tracing - OpenAI style"""
+     def decorator(func):
+         def wrapper(*args, **kwargs):
+             span_name = name or f"{func.__module__}.{func.__name__}"
+
+             span_attributes = {
+                 "function.name": func.__name__,
+                 "function.module": func.__module__,
+             }
+
+             if capture_args and _config.capture_inputs:
+                 # Safely capture arguments (truncate if too long)
+                 args_str = str(args)[:_config.max_attribute_length]
+                 kwargs_str = str(kwargs)[:_config.max_attribute_length]
+                 span_attributes.update({
+                     "function.args": args_str,
+                     "function.kwargs": kwargs_str,
+                 })
+
+             with span(span_name, span_attributes) as s:
+                 try:
+                     result = func(*args, **kwargs)
+
+                     if capture_result and _config.capture_outputs:
+                         result_str = str(result)[:_config.max_attribute_length]
+                         s.set_attribute("function.result", result_str)
+
+                     return result
+                 except Exception as e:
+                     s.set_error(e)
+                     raise
+
+         return wrapper
+     return decorator
+
+
+ # Agent-specific tracing helpers
+ def trace_agent_run(agent_name: str):
+     """Start an agent run trace"""
+     return trace(f"agent.{agent_name}.run", {
+         "agent.name": agent_name,
+         "agent.type": "high_level",
+     })
+
+
+ def trace_tool_call(tool_name: str):
+     """Start a tool call span"""
+     return span(f"tool.{tool_name}", {
+         "tool.name": tool_name,
+         "tool.type": "function",
+     })
+
+
+ def trace_llm_call(model: str, prompt_length: int):
+     """Start an LLM call span"""
+     return span("llm.call", {
+         "llm.model": model,
+         "llm.prompt_length": prompt_length,
+         "llm.provider": "anthropic",
+     })
+
+
+ def trace_workflow_step(step_name: str, step_index: int):
+     """Start a workflow step span"""
+     return span(f"workflow.step.{step_name}", {
+         "workflow.step.name": step_name,
+         "workflow.step.index": step_index,
+     })
+
+
+ def trace_memory_operation(operation: str, memory_type: str):
+     """Start a memory operation span"""
+     return span(f"memory.{operation}", {
+         "memory.operation": operation,
+         "memory.type": memory_type,
+     })
+
+
+ # Public API for configuration
+ def enable_tracing():
+     """Enable tracing"""
+     _config.enabled = True
+
+
+ def disable_tracing():
+     """Disable tracing"""
+     _config.enabled = False
+
+
+ def is_tracing_enabled() -> bool:
+     """Check if tracing is enabled"""
+     return _config.enabled and _config.should_sample()
+
+
+ def set_sample_rate(rate: float):
+     """Set sampling rate (0.0 to 1.0)"""
+     _config.sample_rate = max(0.0, min(1.0, rate))
+
+
+ def get_trace_stats() -> Dict[str, Any]:
+     """Get tracing statistics"""
+     current_trace = get_current_trace()
+     current_span = get_current_span()
+
+     return {
+         "enabled": _config.enabled,
+         "sample_rate": _config.sample_rate,
+         "current_trace_id": current_trace.id if current_trace else None,
+         "current_span_id": current_span.id if current_span else None,
+         "processor_count": len(_config.processors),
+         "config": {
+             "capture_inputs": _config.capture_inputs,
+             "capture_outputs": _config.capture_outputs,
+             "debug_logging": _config.debug_logging,
+             "max_attribute_length": _config.max_attribute_length,
+         }
+     }
+
+
+ # Initialize telemetry on module import
+ def _initialize_telemetry():
+     """Initialize telemetry system with environment-based configuration"""
+     if _config.debug_logging:
+         print("AGNT5 Tracing: Initialized with zero-dependency telemetry")
+         print(f"AGNT5 Tracing: Sample rate = {_config.sample_rate}")
+         print(f"AGNT5 Tracing: Processors = {len(_config.processors)}")
+
+
+ # Call initialization
+ _initialize_telemetry()
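
The module is self-contained, so its intended call pattern can be read directly from the public functions above. A minimal usage sketch, not part of the published wheel; names such as "order-pipeline", "fetch_order", and the attribute values are purely illustrative:

from agnt5 import tracing

# With AGNT5_TRACING_DEBUG=true the default LoggingTraceProcessor prints each
# finished span to stdout; otherwise spans only reach SdkCoreTraceProcessor
# once an FFI bridge has been attached.
with tracing.trace("order-pipeline", {"env": "dev"}):
    with tracing.span("fetch_order", {"order_id": "123"}) as s:
        s.add_event("cache.miss")
        tracing.set_span_attribute("order.total", 42.5)
        tracing.info("order fetched", source="db")  # recorded as a log.info event on the current span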
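
The traced decorator composes with the agent and tool helper factories, since all of them share the same context-variable stack. Another hedged sketch; the summarize function and the "support-bot" / "web_search" names are made up for illustration:

from agnt5.tracing import traced, trace_agent_run, trace_tool_call

@traced(capture_args=True, capture_result=True)
def summarize(text: str) -> str:
    return text[:100]

with trace_agent_run("support-bot"):      # opens an agent.<name>.run trace
    with trace_tool_call("web_search"):   # nested tool.<name> span
        summarize("hello world")          # adds a <module>.summarize span via @traced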
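
Processors are pluggable, and runtime behaviour is driven by the AGNT5_TRACING_* environment variables read in TracingConfig (AGNT5_TRACING_ENABLED, AGNT5_TRACING_SAMPLE_RATE, AGNT5_TRACING_DEBUG, and so on). A hypothetical custom processor wired in through TracingContext, assuming nothing beyond the classes defined in this file:

from agnt5.tracing import Span, Trace, TraceProcessor, TracingContext, set_sample_rate

class StdoutProcessor(TraceProcessor):
    """Prints a one-line summary for every finished span."""
    def process_span(self, span: Span, trace: Trace):
        print(f"{span.name}: {span.duration_ms:.1f}ms status={span.status}")

TracingContext.add_processor(StdoutProcessor())
set_sample_rate(0.25)  # clamped to the 0.0-1.0 range by set_sample_rate
# TracingContext.set_bridge(bridge)  # attach the Rust SDK-Core FFI bridge when one is available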