agnt5-0.1.3-cp39-abi3-musllinux_1_2_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of agnt5 might be problematic.

agnt5/__init__.py ADDED
@@ -0,0 +1,23 @@
+ """
+ AGNT5 Python SDK - Build durable, resilient agent-first applications.
+
+ This SDK provides high-level components for building agents, tools, and workflows
+ with built-in durability guarantees and state management, backed by a high-performance
+ Rust core.
+ """
+
+ from .version import _get_version
+ from ._compat import _rust_available, _import_error
+ from .decorators import function
+ from .worker import Worker
+ from .logging import install_opentelemetry_logging, remove_opentelemetry_logging
+
+ __version__ = _get_version()
+
+ __all__ = [
+     'function',
+     'Worker',
+     'install_opentelemetry_logging',
+     'remove_opentelemetry_logging',
+     '__version__',
+ ]
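
For orientation, a minimal usage sketch of the API re-exported above. The @function decorator and its (ctx, ...) handler signature come from agnt5/decorators.py in this diff; the Worker constructor and run() call are assumptions, since agnt5/worker.py is not shown here.

from agnt5 import Worker, function, install_opentelemetry_logging

@function("add_numbers")
def add_numbers(ctx, a: int, b: int) -> int:
    # Handlers take an execution context followed by the decoded JSON parameters.
    return a + b

if __name__ == "__main__":
    install_opentelemetry_logging()  # forward Python logs to the Rust core (requires the native extension)
    worker = Worker()                # hypothetical: Worker's real constructor args are not part of this diff
    worker.run()                     # hypothetical entry point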
agnt5/_compat.py ADDED
@@ -0,0 +1,15 @@
+ """
+ Compatibility utilities for the AGNT5 Python SDK.
+
+ This module handles runtime compatibility checks and provides utilities
+ for cross-referencing throughout the project.
+ """
+
+ # Check if Rust core is available
+ try:
+     from . import _core
+     _rust_available = True
+     _import_error = None
+ except ImportError as e:
+     _rust_available = False
+     _import_error = e
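
The _rust_available flag gates optional native features elsewhere in the package; agnt5/logging.py (below) imports log_from_python only when it is true. A minimal sketch of the same pattern for downstream code:

from agnt5._compat import _rust_available, _import_error

if _rust_available:
    from agnt5._core import log_from_python  # native extension is importable
else:
    # Fall back to pure Python; _import_error keeps the original ImportError for diagnostics.
    print(f"Rust core unavailable: {_import_error}")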
agnt5/_core.abi3.so ADDED
Binary file
agnt5/components.py ADDED
@@ -0,0 +1,278 @@
+ """
+ Component abstraction layer for AGNT5 SDK.
+
+ This module defines the base classes for all component types:
+ - Functions: Stateless operations
+ - Objects: Virtual objects with persistent state
+ - Flows: Multi-step workflows with orchestration
+ """
+
+ from abc import ABC, abstractmethod
+ from enum import Enum
+ from typing import Any, Callable, Dict, List, Optional, Type, Union
+ import uuid
+ import json
+ import time
+
+
+ class ComponentType(Enum):
+     """Component types matching protobuf enum"""
+     FUNCTION = "function"
+     OBJECT = "object"
+     FLOW = "flow"
+
+
+ class ExecutionContext:
+     """
+     Unified execution context for all component types.
+
+     Provides methods for:
+     - Functions: Simple input/output
+     - Objects: State management and mutations
+     - Flows: Orchestration and step coordination
+     """
+
+     def __init__(self, invocation_id: str, component_type: ComponentType):
+         self.invocation_id = invocation_id
+         self.component_type = component_type
+
+         # Object-specific state management
+         self.object_id: Optional[str] = None
+         self.state: Optional[Dict[str, Any]] = None
+         self.state_mutations: List[Dict[str, Any]] = []
+
+         # Flow-specific orchestration
+         self.flow_instance_id: Optional[str] = None
+         self.flow_step: int = 0
+         self.checkpoint_data: Optional[Dict[str, Any]] = None
+
+         # Extensible metadata
+         self.metadata: Dict[str, str] = {}
+
+     # State management methods (for Objects)
+     def get_state(self, key: str, default: Any = None) -> Any:
+         """Get a value from object state"""
+         if self.state is None:
+             return default
+         return self.state.get(key, default)
+
+     def set_state(self, key: str, value: Any) -> None:
+         """Set a value in object state (records mutation)"""
+         if self.state is None:
+             self.state = {}
+
+         old_value = self.state.get(key)
+         self.state[key] = value
+
+         # Record mutation for persistence
+         self.state_mutations.append({
+             "operation": "set",
+             "key": key,
+             "old_value": old_value,
+             "new_value": value,
+             "timestamp": int(time.time() * 1000)
+         })
+
+     def delete_state(self, key: str) -> Any:
+         """Delete a value from object state"""
+         if self.state is None or key not in self.state:
+             return None
+
+         old_value = self.state.pop(key)
+
+         # Record mutation
+         self.state_mutations.append({
+             "operation": "delete",
+             "key": key,
+             "old_value": old_value,
+             "new_value": None,
+             "timestamp": int(time.time() * 1000)
+         })
+
+         return old_value
+
+     # Flow orchestration methods (for Flows - future implementation)
+     async def call_function(self, function_name: str, input_data: Any) -> Any:
+         """Call another function from within a flow"""
+         # TODO: Implement in Phase 3 (Flows)
+         raise NotImplementedError("Flow orchestration coming in Phase 3")
+
+     async def call_object(self, object_type: str, object_id: str,
+                           method: str, input_data: Any) -> Any:
+         """Call a method on a virtual object from within a flow"""
+         # TODO: Implement in Phase 3 (Flows)
+         raise NotImplementedError("Flow orchestration coming in Phase 3")
+
+     async def sleep(self, duration_seconds: int) -> None:
+         """Durable sleep in a flow"""
+         # TODO: Implement in Phase 3 (Flows)
+         raise NotImplementedError("Flow orchestration coming in Phase 3")
+
+     async def wait_for_event(self, event_type: str, timeout_seconds: int = None) -> Any:
+         """Wait for external event in a flow"""
+         # TODO: Implement in Phase 3 (Flows)
+         raise NotImplementedError("Flow orchestration coming in Phase 3")
+
+
+ class Component(ABC):
+     """Base class for all component types"""
+
+     def __init__(self, name: str, component_type: ComponentType):
+         self.name = name
+         self.component_type = component_type
+         self.metadata: Dict[str, str] = {}
+
+     @abstractmethod
+     async def invoke(self, context: ExecutionContext, input_data: Any) -> Any:
+         """Execute the component with given context and input"""
+         pass
+
+     def to_component_info(self) -> Dict[str, Any]:
+         """Convert to ComponentInfo for registration"""
+         return {
+             "name": self.name,
+             "component_type": self.component_type.value,
+             "metadata": self.metadata
+         }
+
+
+ class FunctionComponent(Component):
+     """Function component - stateless operation"""
+
+     def __init__(self, name: str, handler: Callable, **kwargs):
+         super().__init__(name, ComponentType.FUNCTION)
+         self.handler = handler
+         self.streaming = kwargs.get('streaming', False)
+
+         # Add function-specific metadata
+         self.metadata.update({
+             'streaming': str(self.streaming),
+             'handler_name': handler.__name__
+         })
+
+     async def invoke(self, context: ExecutionContext, input_data: Any) -> Any:
+         """Execute the function"""
+         # Functions get simple context and input
+         if self.streaming:
+             # For streaming functions, return async generator
+             result = self.handler(context, input_data)
+             if hasattr(result, '__aiter__'):
+                 return result
+             else:
+                 # Convert sync generator to async
+                 async def async_generator():
+                     for item in result:
+                         yield item
+                 return async_generator()
+         else:
+             # Regular function call
+             result = self.handler(context, input_data)
+             # Handle both sync and async functions
+             if hasattr(result, '__await__'):
+                 return await result
+             return result
+
+
+ class ObjectComponent(Component):
+     """Virtual Object component - stateful entity"""
+
+     def __init__(self, name: str, object_class: Type, **kwargs):
+         super().__init__(name, ComponentType.OBJECT)
+         self.object_class = object_class
+
+         # Add object-specific metadata
+         self.metadata.update({
+             'class_name': object_class.__name__,
+             'methods': [m for m in dir(object_class)
+                         if not m.startswith('_') and callable(getattr(object_class, m))]
+         })
+
+     async def invoke(self, context: ExecutionContext, input_data: Any) -> Any:
+         """Execute a method on the virtual object"""
+         # TODO: Implement in Phase 2 (Objects)
+         # For now, raise helpful error
+         raise NotImplementedError(
+             f"Virtual Objects coming in Phase 2. "
+             f"Component '{self.name}' is registered but not yet executable. "
+             f"Use @function decorator for now."
+         )
+
+
+ class FlowComponent(Component):
+     """Flow component - multi-step workflow"""
+
+     def __init__(self, name: str, flow_handler: Callable, **kwargs):
+         super().__init__(name, ComponentType.FLOW)
+         self.flow_handler = flow_handler
+         self.steps = kwargs.get('steps', [])
+
+         # Add flow-specific metadata
+         self.metadata.update({
+             'handler_name': flow_handler.__name__,
+             'step_count': str(len(self.steps)) if self.steps else 'dynamic'
+         })
+
+     async def invoke(self, context: ExecutionContext, input_data: Any) -> Any:
+         """Execute the workflow"""
+         # TODO: Implement in Phase 3 (Flows)
+         # For now, raise helpful error
+         raise NotImplementedError(
+             f"Flows/Workflows coming in Phase 3. "
+             f"Component '{self.name}' is registered but not yet executable. "
+             f"Use @function decorator for now."
+         )
+
+
+ # Helper classes for future phases
+
+ class StateManager:
+     """Manages state persistence for virtual objects (Phase 2)"""
+
+     def __init__(self):
+         # Will be implemented with actual state backend
+         pass
+
+     async def load_state(self, object_type: str, object_id: str) -> Optional[Dict[str, Any]]:
+         """Load object state from persistent storage"""
+         # TODO: Implement with NATS KV or similar
+         return None
+
+     async def save_state(self, object_type: str, object_id: str,
+                          state: Dict[str, Any],
+                          mutations: List[Dict[str, Any]]) -> None:
+         """Save object state to persistent storage"""
+         # TODO: Implement with NATS KV or similar
+         pass
+
+
+ class FlowExecutor:
+     """Manages workflow execution and orchestration (Phase 3)"""
+
+     def __init__(self):
+         # Will be implemented with actual flow execution engine
+         pass
+
+     async def execute_step(self, flow_instance_id: str, step: int,
+                            input_data: Any) -> Any:
+         """Execute a single step in a workflow"""
+         # TODO: Implement with deterministic replay
+         pass
+
+     async def checkpoint(self, flow_instance_id: str,
+                          checkpoint_data: Dict[str, Any]) -> None:
+         """Save workflow checkpoint"""
+         # TODO: Implement with journal persistence
+         pass
+
+
+ # Export main classes
+ __all__ = [
+     'ComponentType',
+     'ExecutionContext',
+     'Component',
+     'FunctionComponent',
+     'ObjectComponent',
+     'FlowComponent',
+     'StateManager',
+     'FlowExecutor'
+ ]
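
To illustrate the mutation journal built up by ExecutionContext.set_state and delete_state above (the persistence side, StateManager, is still a Phase 2 TODO), a short sketch:

from agnt5.components import ComponentType, ExecutionContext

ctx = ExecutionContext("inv-123", ComponentType.OBJECT)
ctx.set_state("counter", 1)    # appends a "set" mutation with old/new values and a ms timestamp
ctx.set_state("counter", 2)
ctx.delete_state("counter")    # appends a "delete" mutation

# state_mutations is the list a future StateManager.save_state would persist.
for mutation in ctx.state_mutations:
    print(mutation["operation"], mutation["key"], mutation["old_value"], mutation["new_value"])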
agnt5/decorators.py ADDED
@@ -0,0 +1,240 @@
+ """
+ Function decorators for AGNT5 workers.
+
+ This module provides decorators for registering functions as handlers
+ that can be invoked through the AGNT5 platform.
+ """
+
+ import functools
+ import inspect
+ import logging
+ from typing import Any, Callable, Dict, List, Optional
+
+ # Set default logging level to DEBUG
+ logging.getLogger().setLevel(logging.DEBUG)
+ logger = logging.getLogger(__name__)
+
+ # Global registry of decorated functions
+ _function_registry: Dict[str, Callable] = {}
+
+
+ def function(name: str = None):
+     """
+     Decorator to register a function as an AGNT5 handler.
+
+     Args:
+         name: The name to register the function under. If None, uses the function's name.
+
+     Usage:
+         @function("add_numbers")
+         def add_numbers(ctx, a: int, b: int) -> int:
+             return a + b
+
+         @function()
+         def greet_user(ctx, name: str) -> str:
+             return f"Hello, {name}!"
+     """
+     def decorator(func: Callable) -> Callable:
+         handler_name = name if name is not None else func.__name__
+
+         # Store function metadata
+         func._agnt5_handler_name = handler_name
+         func._agnt5_is_function = True
+
+         # Register in global registry
+         _function_registry[handler_name] = func
+
+         logger.debug(f"Registered function handler: {handler_name}")
+
+         @functools.wraps(func)
+         def wrapper(*args, **kwargs):
+             return func(*args, **kwargs)
+
+         # Copy metadata to wrapper
+         wrapper._agnt5_handler_name = handler_name
+         wrapper._agnt5_is_function = True
+
+         return wrapper
+
+     return decorator
+
+
+ def get_registered_functions() -> Dict[str, Callable]:
+     """
+     Get all registered function handlers.
+
+     Returns:
+         Dictionary mapping handler names to functions
+     """
+     return _function_registry.copy()
+
+
+ def get_function_metadata(func: Callable) -> Optional[Dict[str, Any]]:
+     """
+     Extract metadata from a decorated function.
+
+     Args:
+         func: The function to extract metadata from
+
+     Returns:
+         Dictionary with function metadata or None if not decorated
+     """
+     if not hasattr(func, '_agnt5_is_function'):
+         return None
+
+     signature = inspect.signature(func)
+     parameters = []
+     param_items = list(signature.parameters.items())
+
+     for i, (param_name, param) in enumerate(param_items):
+         if i == 0 and param_name == 'ctx':  # Skip context parameter if it's the first one
+             continue
+
+         param_info = {
+             'name': param_name,
+             'type': 'any'  # Default type, could be enhanced with type hints
+         }
+
+         # Extract type information if available
+         if param.annotation != inspect.Parameter.empty:
+             param_info['type'] = str(param.annotation.__name__ if hasattr(param.annotation, '__name__') else param.annotation)
+
+         if param.default != inspect.Parameter.empty:
+             param_info['default'] = param.default
+
+         parameters.append(param_info)
+
+     return {
+         'name': func._agnt5_handler_name,
+         'type': 'function',
+         'parameters': parameters,
+         'return_type': str(signature.return_annotation.__name__ if signature.return_annotation != inspect.Parameter.empty else 'any')
+     }
+
+
+ # Alias for more intuitive usage
+ handler = function
+
+
+ def clear_registry():
+     """Clear the function registry. Mainly for testing."""
+     global _function_registry
+     _function_registry.clear()
+
+
+ def invoke_function(handler_name: str, input_data: bytes, context: Any = None) -> bytes:
+     """
+     Invoke a registered function handler.
+
+     Args:
+         handler_name: Name of the handler to invoke
+         input_data: Input data as bytes (will be decoded from JSON)
+         context: Execution context
+
+     Returns:
+         Function result as bytes (JSON encoded)
+
+     Raises:
+         ValueError: If handler is not found
+         RuntimeError: If function execution fails
+     """
+     import json
+     import traceback
+
+     # Input validation
+     if not handler_name:
+         error_msg = "Empty handler name provided"
+         logger.error(error_msg)
+         raise ValueError(error_msg)
+
+     if handler_name not in _function_registry:
+         error_msg = f"Handler '{handler_name}' not found in registry. Available handlers: {list(_function_registry.keys())}"
+         logger.error(error_msg)
+         raise ValueError(error_msg)
+
+     func = _function_registry[handler_name]
+     logger.info(f"Invoking handler: {handler_name}")
+
+     try:
+         # Decode input data
+         if input_data:
+             logger.debug(f"Processing {len(input_data)} bytes for {handler_name}")
+
+             # Try direct JSON first
+             try:
+                 raw_data = input_data.decode('utf-8')
+                 input_params = json.loads(raw_data)
+                 logger.info(f"Decoded JSON input for {handler_name}: {type(input_params)} with keys: {list(input_params.keys()) if isinstance(input_params, dict) else 'non-dict'}")
+                 logger.debug(f"Input parameters: {input_params}")
+             except (UnicodeDecodeError, json.JSONDecodeError):
+                 # Fallback to protobuf extraction
+                 logger.debug(f"JSON decoding failed, trying protobuf extraction for {handler_name}")
+                 start_idx = input_data.find(b'\x1a')
+                 if start_idx == -1 or start_idx + 1 >= len(input_data):
+                     logger.error(f"Invalid data format for {handler_name}. Length: {len(input_data)}, Hex: {input_data.hex()}")
+                     raise RuntimeError("Invalid input data - not JSON and no protobuf marker found")
+
+                 json_length = input_data[start_idx + 1]
+                 json_start = start_idx + 2
+
+                 if json_start + json_length > len(input_data):
+                     raise RuntimeError(f"Protobuf structure invalid - length {json_length} exceeds data")
+
+                 json_bytes = input_data[json_start:json_start + json_length]
+                 raw_data = json_bytes.decode('utf-8')
+                 input_params = json.loads(raw_data)
+                 logger.info(f"Extracted from protobuf for {handler_name}: {type(input_params)} with keys: {list(input_params.keys()) if isinstance(input_params, dict) else 'non-dict'}")
+                 logger.debug(f"Extracted parameters: {input_params}")
+
+         else:
+             input_params = {}
+             logger.debug(f"No input data provided for {handler_name}")
+
+         # Execute function
+         try:
+             sig = inspect.signature(func)
+             params = list(sig.parameters.keys())
+
+             logger.info(f"Calling {handler_name} with signature: {sig}")
+
+             if params and params[0] == 'ctx':
+                 if isinstance(input_params, dict):
+                     logger.debug(f"Calling {handler_name}(ctx, **{input_params})")
+                     result = func(context, **input_params)
+                 else:
+                     logger.debug(f"Calling {handler_name}(ctx, {input_params})")
+                     result = func(context, input_params)
+             else:
+                 if isinstance(input_params, dict):
+                     logger.debug(f"Calling {handler_name}(**{input_params})")
+                     result = func(**input_params)
+                 else:
+                     logger.debug(f"Calling {handler_name}({input_params})")
+                     result = func(input_params)
+
+         except TypeError as e:
+             logger.error(f"Signature mismatch in {handler_name}: {e}. Expected: {sig}, Got: {input_params}")
+             raise RuntimeError(f"Function signature mismatch: {e}")
+
+         except Exception as e:
+             logger.error(f"Function {handler_name} failed: {type(e).__name__}: {e}")
+             raise RuntimeError(f"Function execution failed: {e}")
+
+         # Encode result
+         if result is None:
+             return b""
+
+         try:
+             result_json = json.dumps(result)
+             return result_json.encode('utf-8')
+         except (TypeError, ValueError, UnicodeEncodeError) as e:
+             logger.error(f"Cannot serialize/encode result from {handler_name}: {type(result)} - {e}")
+             raise RuntimeError(f"Result serialization/encoding error: {e}")
+
+     except RuntimeError:
+         raise
+
+     except Exception as e:
+         logger.error(f"Unexpected error in {handler_name}: {type(e).__name__}: {e}")
+         logger.debug(f"Stack trace: {traceback.format_exc()}")
+         raise RuntimeError(f"Unexpected error: {e}")
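
A round trip through the registry as implemented above: register a handler with @function, then dispatch JSON-encoded bytes through invoke_function (this exercises the direct-JSON path; the protobuf fallback only triggers when the payload is not valid UTF-8 JSON).

import json
from agnt5.decorators import function, invoke_function

@function("greet_user")
def greet_user(ctx, name: str) -> str:
    return f"Hello, {name}!"

payload = json.dumps({"name": "Ada"}).encode("utf-8")
result = invoke_function("greet_user", payload, context=None)
print(json.loads(result))  # Hello, Ada!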
agnt5/logging.py ADDED
@@ -0,0 +1,140 @@
+ """
+ OpenTelemetry logging integration for AGNT5 Python SDK.
+
+ This module provides a logging handler that forwards Python logs to the Rust core
+ for integration with OpenTelemetry. All logs are automatically correlated with
+ traces and sent to the OTLP collector.
+ """
+
+ import logging
+ import os
+ from typing import Optional
+
+ from ._compat import _rust_available
+
+ if _rust_available:
+     from ._core import log_from_python
+
+
+ class OpenTelemetryHandler(logging.Handler):
+     """
+     A logging handler that forwards Python logs to Rust for OpenTelemetry integration.
+
+     This handler automatically captures all Python logs and forwards them to the
+     Rust core where they are integrated with OpenTelemetry tracing and sent to
+     the OTLP collector. Logs are automatically correlated with active traces.
+     """
+
+     def __init__(self, level: int = logging.NOTSET):
+         super().__init__(level)
+
+         if not _rust_available:
+             raise RuntimeError("OpenTelemetry logging handler requires Rust core")
+
+     def emit(self, record: logging.LogRecord) -> None:
+         """
+         Forward a log record to Rust for OpenTelemetry integration.
+
+         Args:
+             record: The Python log record to forward
+         """
+         try:
+             # Format the message
+             message = self.format(record)
+
+             # Extract metadata for Rust
+             level = record.levelname
+             target = record.name  # Logger name (e.g., 'agnt5.worker')
+             module_path = getattr(record, 'module', record.name)
+             filename = getattr(record, 'pathname', None)
+             line = getattr(record, 'lineno', None)
+
+             # Make filename relative if it's absolute
+             if filename and os.path.isabs(filename):
+                 try:
+                     # Try to make it relative to current working directory
+                     filename = os.path.relpath(filename)
+                 except ValueError:
+                     # If relpath fails (e.g., different drives on Windows), use basename
+                     filename = os.path.basename(filename)
+
+             # Forward to Rust core - silently ignore if telemetry not ready yet
+             try:
+                 log_from_python(
+                     level=level,
+                     message=message,
+                     target=target,
+                     module_path=module_path,
+                     filename=filename,
+                     line=line
+                 )
+             except Exception:
+                 # Silently ignore if Rust telemetry system not ready yet
+                 # This handles the timing issue during startup
+                 pass
+
+         except Exception as e:
+             # Don't let logging errors crash the application
+             # Use handleError to maintain Python logging standards
+             self.handleError(record)
+
+
+ def install_opentelemetry_logging(
+     logger: Optional[logging.Logger] = None,
+     level: int = logging.INFO,
+     format_string: Optional[str] = None
+ ) -> OpenTelemetryHandler:
+     """
+     Install OpenTelemetry logging handler on a logger.
+
+     Args:
+         logger: Logger to install handler on. If None, uses root logger.
+         level: Minimum log level to forward to OpenTelemetry
+         format_string: Optional format string for log messages
+
+     Returns:
+         The installed OpenTelemetryHandler instance
+
+     Example:
+         # Install on root logger (captures all logs)
+         install_opentelemetry_logging()
+
+         # Install on specific logger
+         logger = logging.getLogger('my_app')
+         install_opentelemetry_logging(logger, level=logging.DEBUG)
+     """
+     if logger is None:
+         logger = logging.getLogger()
+
+     # Create handler
+     handler = OpenTelemetryHandler(level=level)
+
+     # Set formatter if provided
+     if format_string:
+         formatter = logging.Formatter(format_string)
+         handler.setFormatter(formatter)
+
+     # Install handler
+     logger.addHandler(handler)
+
+     return handler
+
+
+ def remove_opentelemetry_logging(logger: Optional[logging.Logger] = None) -> None:
+     """
+     Remove OpenTelemetry logging handlers from a logger.
+
+     Args:
+         logger: Logger to remove handlers from. If None, uses root logger.
+     """
+     if logger is None:
+         logger = logging.getLogger()
+
+     # Remove all OpenTelemetryHandler instances
+     handlers_to_remove = [
+         handler for handler in logger.handlers
+         if isinstance(handler, OpenTelemetryHandler)
+     ]
+
+     for handler in handlers_to_remove:
+         logger.removeHandler(handler)
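
Putting the two logging helpers together; this only works where the Rust core wheel is importable, since OpenTelemetryHandler raises RuntimeError otherwise.

import logging
from agnt5.logging import install_opentelemetry_logging, remove_opentelemetry_logging

handler = install_opentelemetry_logging(level=logging.INFO,
                                        format_string="%(name)s: %(message)s")
logging.getLogger("my_app").info("forwarded to the Rust core and on to the OTLP collector")

# Detach all OpenTelemetryHandler instances from the root logger on shutdown.
remove_opentelemetry_logging()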