PECLibrary 0.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- PECLibrary/config/__init__.py +14 -0
- PECLibrary/config/defaults.py +59 -0
- PECLibrary/config/settings.py +178 -0
- PECLibrary/decorators/__init__.py +17 -0
- PECLibrary/decorators/capture.py +279 -0
- PECLibrary/models/__init__.py +0 -0
- PECLibrary/models/captured_models.py +206 -0
- PECLibrary/models/pydantic_compat.py +75 -0
- PECLibrary/services/__init__.py +38 -0
- PECLibrary/services/error_capture.py +297 -0
- PECLibrary/services/storage_service.py +365 -0
- PECLibrary/services/transport.py +444 -0
- PECLibrary/utils/__init__.py +40 -0
- PECLibrary/utils/context_managers.py +149 -0
- PECLibrary/utils/decorators.py +0 -0
- PECLibrary/utils/helpers.py +319 -0
- PECLibrary/utils/logger_config.py +68 -0
- PECLibrary/utils/monitor_errors.py +58 -0
- peclibrary-0.7.0.dist-info/METADATA +312 -0
- peclibrary-0.7.0.dist-info/RECORD +22 -0
- peclibrary-0.7.0.dist-info/WHEEL +5 -0
- peclibrary-0.7.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Configuration module for palantir-event-capture
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from .settings import Settings, get_settings, configure, reset_settings
|
|
6
|
+
from . import defaults
|
|
7
|
+
|
|
8
|
+
__all__ = [
|
|
9
|
+
"Settings",
|
|
10
|
+
"get_settings",
|
|
11
|
+
"configure",
|
|
12
|
+
"reset_settings",
|
|
13
|
+
"defaults"
|
|
14
|
+
]
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Default configuration values for palantir-event-capture
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from ..models.captured_models import Severity, EventType, PipelineComponent
|
|
6
|
+
|
|
7
|
+
# Kafka Configuration
|
|
8
|
+
DEFAULT_KAFKA_BOOTSTRAP_SERVERS_RCA = "kafka01.prod.bt:9092,kafka02.prod.bt:9092,kafka03.prod.bt:9092,kafka04.prod.bt:9092"
|
|
9
|
+
DEFAULT_KAFKA_EVENT_TOPIC_RCA = "rca.events"
|
|
10
|
+
DEFAULT_KAFKA_ENABLED_RCA = True # Disabled by default to avoid connection errors
|
|
11
|
+
DEFAULT_KAFKA_COMPRESSION_RCA = "gzip"
|
|
12
|
+
DEFAULT_KAFKA_BATCH_SIZE_RCA = 16384
|
|
13
|
+
DEFAULT_KAFKA_LINGER_MS_RCA = 10
|
|
14
|
+
|
|
15
|
+
# Logging Configuration
|
|
16
|
+
DEFAULT_LOG_LEVEL = "INFO"
|
|
17
|
+
DEFAULT_LOG_TO_CONSOLE = True
|
|
18
|
+
DEFAULT_LOG_FORMAT = (
|
|
19
|
+
"<green>{time:YYYY-MM-DD HH:mm:ss}</green> | "
|
|
20
|
+
"<level>{level: <8}</level> | "
|
|
21
|
+
"<cyan>{name}</cyan>:<cyan>{function}</cyan> - "
|
|
22
|
+
"<level>{message}</level>"
|
|
23
|
+
)
|
|
24
|
+
|
|
25
|
+
# Event Capture Settings
|
|
26
|
+
DEFAULT_SEND_TO_TRANSPORT = True
|
|
27
|
+
DEFAULT_COMPONENT = PipelineComponent.UNKNOWN
|
|
28
|
+
DEFAULT_SEVERITY = Severity.MEDIUM
|
|
29
|
+
DEFAULT_EVENT_TYPE = EventType.ERROR
|
|
30
|
+
DEFAULT_RERAISE = True
|
|
31
|
+
DEFAULT_ENABLE_STACK_TRACES = True
|
|
32
|
+
DEFAULT_MAX_STACK_TRACE_LENGTH = 5000 # characters
|
|
33
|
+
DEFAULT_USE_BACKGROUND_TRANSPORT = False
|
|
34
|
+
|
|
35
|
+
# Retry Configuration
|
|
36
|
+
DEFAULT_MAX_RETRIES = 3
|
|
37
|
+
DEFAULT_RETRY_DELAY = 1.0 # seconds
|
|
38
|
+
DEFAULT_RETRY_BACKOFF = 2.0 # multiplier
|
|
39
|
+
DEFAULT_CAPTURE_ALL_RETRIES = False # Only capture final failure by default
|
|
40
|
+
|
|
41
|
+
# Performance Settings
|
|
42
|
+
DEFAULT_ASYNC_TRANSPORT = False
|
|
43
|
+
DEFAULT_EVENT_BATCH_SIZE = 100
|
|
44
|
+
DEFAULT_EVENT_QUEUE_SIZE = 1000
|
|
45
|
+
|
|
46
|
+
# Storage Configuration (optional)
|
|
47
|
+
DEFAULT_STORAGE_ENABLED = False
|
|
48
|
+
DEFAULT_STORAGE_PATH = "./event_logs"
|
|
49
|
+
DEFAULT_STORAGE_MAX_FILES = 100
|
|
50
|
+
DEFAULT_STORAGE_MAX_FILE_SIZE_MB = 10
|
|
51
|
+
|
|
52
|
+
# Security & Privacy
|
|
53
|
+
DEFAULT_SANITIZE_STACK_TRACES = False
|
|
54
|
+
DEFAULT_PII_PATTERNS = [] # Regex patterns to redact
|
|
55
|
+
DEFAULT_MAX_METADATA_SIZE = 10000 # characters
|
|
56
|
+
|
|
57
|
+
# Event Registry
|
|
58
|
+
DEFAULT_ID_REGISTRY_CLEANUP_INTERVAL = 3600 # seconds
|
|
59
|
+
DEFAULT_ID_REGISTRY_MAX_SIZE = 100000 # events
|
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Configuration management for palantir-event-capture
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import os
|
|
6
|
+
from typing import Any, Dict, Optional
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
|
|
9
|
+
from dotenv import load_dotenv
|
|
10
|
+
|
|
11
|
+
from . import defaults
|
|
12
|
+
from ..models.captured_models import Severity, EventType, PipelineComponent
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class Settings:
    """Global settings manager.

    Each setting is resolved from the environment (a local ``.env`` file is
    loaded first via python-dotenv), falling back to the constants in
    ``config.defaults``. Use ``get_settings()`` for the shared instance.
    """

    def __init__(self):
        # Load environment variables from .env file if it exists
        load_dotenv()

        # Kafka Configuration
        self.KAFKA_BOOTSTRAP_SERVERS_RCA: str = self._get_env(
            "KAFKA_BOOTSTRAP_SERVERS_RCA", defaults.DEFAULT_KAFKA_BOOTSTRAP_SERVERS_RCA
        )
        self.KAFKA_EVENT_TOPIC_RCA: str = self._get_env(
            "KAFKA_EVENT_TOPIC_RCA", defaults.DEFAULT_KAFKA_EVENT_TOPIC_RCA
        )
        self.KAFKA_ENABLED_RCA: bool = self._get_env_bool(
            "KAFKA_ENABLED_RCA", defaults.DEFAULT_KAFKA_ENABLED_RCA
        )
        self.KAFKA_COMPRESSION_RCA: str = self._get_env(
            "KAFKA_COMPRESSION_RCA", defaults.DEFAULT_KAFKA_COMPRESSION_RCA
        )
        self.KAFKA_BATCH_SIZE_RCA: int = self._get_env_int(
            "KAFKA_BATCH_SIZE_RCA", defaults.DEFAULT_KAFKA_BATCH_SIZE_RCA
        )
        self.KAFKA_LINGER_MS_RCA: int = self._get_env_int(
            "KAFKA_LINGER_MS_RCA", defaults.DEFAULT_KAFKA_LINGER_MS_RCA
        )

        # Logging Configuration
        self.log_level: str = self._get_env("LOG_LEVEL", defaults.DEFAULT_LOG_LEVEL)
        self.log_to_console: bool = self._get_env_bool(
            "LOG_TO_CONSOLE", defaults.DEFAULT_LOG_TO_CONSOLE
        )
        self.log_format: str = self._get_env("LOG_FORMAT", defaults.DEFAULT_LOG_FORMAT)

        # Event Capture Settings
        self.send_to_transport: bool = self._get_env_bool(
            "SEND_TO_TRANSPORT", defaults.DEFAULT_SEND_TO_TRANSPORT
        )
        self.default_component: PipelineComponent = self._get_env_enum(
            "DEFAULT_COMPONENT", PipelineComponent, defaults.DEFAULT_COMPONENT
        )
        self.default_severity: Severity = self._get_env_enum(
            "DEFAULT_SEVERITY", Severity, defaults.DEFAULT_SEVERITY
        )
        self.default_reraise: bool = self._get_env_bool(
            "DEFAULT_RERAISE", defaults.DEFAULT_RERAISE
        )
        self.enable_stack_traces: bool = self._get_env_bool(
            "ENABLE_STACK_TRACES", defaults.DEFAULT_ENABLE_STACK_TRACES
        )
        self.max_stack_trace_length: int = self._get_env_int(
            "MAX_STACK_TRACE_LENGTH", defaults.DEFAULT_MAX_STACK_TRACE_LENGTH
        )

        # FIX: was a hard-coded False; now sourced from defaults like every
        # other setting so the fallback lives in one place.
        # defaults.DEFAULT_USE_BACKGROUND_TRANSPORT is also False, so
        # behavior is unchanged.
        self.use_background_transport: bool = self._get_env_bool(
            "USE_BACKGROUND_TRANSPORT", defaults.DEFAULT_USE_BACKGROUND_TRANSPORT
        )

        # Retry Configuration
        self.default_max_retries: int = self._get_env_int(
            "DEFAULT_MAX_RETRIES", defaults.DEFAULT_MAX_RETRIES
        )
        self.default_retry_delay: float = self._get_env_float(
            "DEFAULT_RETRY_DELAY", defaults.DEFAULT_RETRY_DELAY
        )
        self.default_retry_backoff: float = self._get_env_float(
            "DEFAULT_RETRY_BACKOFF", defaults.DEFAULT_RETRY_BACKOFF
        )

        # Storage Configuration
        self.storage_enabled: bool = self._get_env_bool(
            "STORAGE_ENABLED", defaults.DEFAULT_STORAGE_ENABLED
        )
        self.storage_path: Path = Path(
            self._get_env("STORAGE_PATH", defaults.DEFAULT_STORAGE_PATH)
        )

    def _get_env(self, key: str, default: Any) -> Any:
        """Return the raw environment value for *key*, or *default* if unset.

        Env values are always strings; callers needing a typed value should
        use the typed helpers below.
        """
        return os.getenv(key, default)

    def _get_env_bool(self, key: str, default: bool) -> bool:
        """Parse a boolean env var; "true"/"1"/"yes"/"on" (any case) mean True."""
        value = os.getenv(key)
        if value is None:
            return default
        return value.lower() in ("true", "1", "yes", "on")

    def _get_env_int(self, key: str, default: int) -> int:
        """Parse an integer env var, falling back to *default* on bad input."""
        value = os.getenv(key)
        if value is None:
            return default
        try:
            return int(value)
        except ValueError:
            return default

    def _get_env_float(self, key: str, default: float) -> float:
        """Parse a float env var, falling back to *default* on bad input."""
        value = os.getenv(key)
        if value is None:
            return default
        try:
            return float(value)
        except ValueError:
            return default

    def _get_env_enum(self, key: str, enum_class: type, default: Any) -> Any:
        """Parse an enum env var by lowercased value, falling back to *default*."""
        value = os.getenv(key)
        if value is None:
            return default
        try:
            # Enums here are value-keyed with lowercase values — TODO confirm
            # against models.captured_models.
            return enum_class(value.lower())
        except (ValueError, KeyError):
            return default

    def update(self, **kwargs: Any) -> None:
        """Update existing settings in place; unknown keys are silently ignored."""
        for key, value in kwargs.items():
            if hasattr(self, key):
                setattr(self, key, value)

    def to_dict(self) -> Dict[str, Any]:
        """Return a dict snapshot of all public (non-underscore) settings."""
        return {
            key: value
            for key, value in self.__dict__.items()
            if not key.startswith("_")
        }
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
# Process-wide Settings singleton, created lazily by get_settings() and
# discarded by reset_settings().
_settings: Optional[Settings] = None


def get_settings() -> Settings:
    """Return the shared Settings instance, building it on first access."""
    global _settings
    if _settings is not None:
        return _settings
    _settings = Settings()
    return _settings
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
def configure(**kwargs: Any) -> None:
    """Programmatically override settings on the shared instance.

    Unknown keys are ignored (see Settings.update).

    Example:
        configure(
            KAFKA_BOOTSTRAP_SERVERS_RCA="localhost:9092",
            KAFKA_ENABLED_RCA=True,
            log_level="DEBUG"
        )
    """
    get_settings().update(**kwargs)
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def reset_settings() -> None:
    """Discard the cached singleton so the next get_settings() rebuilds it.

    Primarily intended for test isolation.
    """
    global _settings
    _settings = None
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Decorators module for palantir-event-capture
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from .capture import (
|
|
6
|
+
capture_errors,
|
|
7
|
+
async_capture_errors,
|
|
8
|
+
retry_with_capture,
|
|
9
|
+
capture_method_errors,
|
|
10
|
+
)
|
|
11
|
+
|
|
12
|
+
__all__ = [
|
|
13
|
+
"capture_errors",
|
|
14
|
+
"async_capture_errors",
|
|
15
|
+
"retry_with_capture",
|
|
16
|
+
"capture_method_errors",
|
|
17
|
+
]
|
|
@@ -0,0 +1,279 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Decorators for automatic error capture.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import functools
|
|
6
|
+
import asyncio
|
|
7
|
+
import time
|
|
8
|
+
from typing import Callable, Optional, Any, TypeVar, ParamSpec
|
|
9
|
+
|
|
10
|
+
from ..models.captured_models import CaptureConfig, PipelineComponent, Severity, EventType
|
|
11
|
+
from ..services.error_capture import get_capture_service
|
|
12
|
+
from ..utils.logger_config import get_logger
|
|
13
|
+
|
|
14
|
+
logger = get_logger()
|
|
15
|
+
|
|
16
|
+
P = ParamSpec('P')
|
|
17
|
+
T = TypeVar('T')
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def capture_errors(
    component: Optional[PipelineComponent] = None,
    component_name: Optional[str] = None,
    pipeline_name: Optional[str] = None,
    engine_name: Optional[str] = None,
    severity: Severity = Severity.HIGH,
    event_type: EventType = EventType.ERROR,
    reraise: bool = True,
    log_to_console: bool = True,
    send_to_transport: bool = True,
    **metadata: Any,
) -> Callable[[Callable[P, T]], Callable[P, T]]:
    """
    Decorator to automatically capture exceptions in functions.

    Args:
        component: Pipeline component the function belongs to.
        component_name / pipeline_name / engine_name: Identifiers attached
            to the captured event.
        severity: Severity assigned to captured events.
        event_type: Event type assigned to captured events.
        reraise: If True, re-raise the exception after capturing.
            If False, swallow the exception and return None.
        log_to_console: Forwarded to CaptureConfig.
        send_to_transport: Forwarded to CaptureConfig.
        **metadata: Extra key/value pairs merged into the event metadata.
    """

    def decorator(func: Callable[P, T]) -> Callable[P, T]:
        @functools.wraps(func)
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
            try:
                return func(*args, **kwargs)
            except Exception as e:
                logger.debug(f"[DECORATOR] ❌ Caught {type(e).__name__}: {e}")

                # PERF: config is now built only on the failure path; the
                # original constructed it on every successful call too.
                config = CaptureConfig(
                    component=component,
                    component_name=component_name,
                    pipeline_name=pipeline_name,
                    engine_name=engine_name,
                    severity=severity,
                    event_type=event_type,
                    reraise=False,  # capture_exception must never re-raise itself
                    log_to_console=log_to_console,
                    send_to_transport=send_to_transport,
                    metadata={
                        'function': func.__name__,
                        'module': func.__module__,
                        **metadata,
                    },
                )

                service = get_capture_service()

                # Capture the error (won't reraise because config.reraise=False)
                try:
                    event = service.capture_exception(
                        e,
                        config=config,
                        skip_frames=1,
                        async_send=True
                    )

                    if event:
                        logger.info(f"[DECORATOR] ✅ Event {event.id} captured and sent")
                    else:
                        logger.warning("[DECORATOR] ⚠️ Event capture returned None")

                except Exception as capture_error:
                    # Best-effort: a capture failure must not mask the
                    # original exception. Continue to the reraise check.
                    logger.error(f"[DECORATOR] ❌ Capture failed: {capture_error}")

                if reraise:
                    logger.debug(f"[DECORATOR] Re-raising {type(e).__name__}")
                    raise
                logger.debug("[DECORATOR] Swallowing error, returning None")
                return None  # type: ignore[return-value]  # swallow mode yields None

        return wrapper

    return decorator
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def async_capture_errors(
    component: Optional[PipelineComponent] = None,
    component_name: Optional[str] = None,
    pipeline_name: Optional[str] = None,
    engine_name: Optional[str] = None,
    severity: Severity = Severity.HIGH,
    event_type: EventType = EventType.ERROR,
    reraise: bool = True,
    log_to_console: bool = True,
    send_to_transport: bool = True,
    **metadata: Any,
) -> Callable[[Callable[P, T]], Callable[P, T]]:
    """
    Async decorator to automatically capture exceptions in async functions.

    Mirrors capture_errors(); see that docstring for parameter semantics.

    Args:
        reraise: If True, re-raise after capturing; if False, swallow the
            exception and return None.
    """

    def decorator(func: Callable[P, T]) -> Callable[P, T]:
        @functools.wraps(func)
        async def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
            try:
                return await func(*args, **kwargs)
            except Exception as e:
                logger.debug(f"[ASYNC DECORATOR] ❌ Caught {type(e).__name__}: {e}")

                # PERF: built lazily so the success path no longer pays for
                # config construction on every call.
                config = CaptureConfig(
                    component=component,
                    component_name=component_name,
                    pipeline_name=pipeline_name,
                    engine_name=engine_name,
                    severity=severity,
                    event_type=event_type,
                    reraise=False,  # capture_exception must never re-raise itself
                    log_to_console=log_to_console,
                    send_to_transport=send_to_transport,
                    metadata={
                        'function': func.__name__,
                        'module': func.__module__,
                        **metadata,
                    },
                )

                service = get_capture_service()

                try:
                    event = service.capture_exception(
                        e,
                        config=config,
                        skip_frames=1,
                        async_send=True
                    )

                    if event:
                        logger.info(f"[ASYNC DECORATOR] ✅ Event {event.id} captured")
                    else:
                        logger.warning("[ASYNC DECORATOR] ⚠️ Event capture returned None")

                except Exception as capture_error:
                    # Best-effort: never let a capture failure mask the
                    # original exception.
                    logger.error(f"[ASYNC DECORATOR] ❌ Capture failed: {capture_error}")

                # Handle reraise
                if reraise:
                    logger.debug(f"[ASYNC DECORATOR] Re-raising {type(e).__name__}")
                    raise
                logger.debug("[ASYNC DECORATOR] Swallowing error, returning None")
                return None  # type: ignore[return-value]

        return wrapper  # type: ignore

    return decorator
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
def retry_with_capture(
    max_retries: int = 3,
    delay: float = 1.0,
    backoff: float = 2.0,
    component: Optional[PipelineComponent] = None,
    component_name: Optional[str] = None,
    pipeline_name: Optional[str] = None,
    engine_name: Optional[str] = None,
    severity: Severity = Severity.MEDIUM,
    capture_all_attempts: bool = False,
    **metadata: Any,
) -> Callable[[Callable[P, T]], Callable[P, T]]:
    """
    Decorator that retries a function on failure and captures errors.

    Args:
        max_retries: Retries after the first attempt, so the function runs
            at most ``max_retries + 1`` times. Must be >= 0.
        delay: Seconds to sleep before the first retry.
        backoff: Multiplier applied to the delay after each failed attempt.
        severity: Severity for non-final failures; the final failure is
            always escalated to Severity.HIGH.
        capture_all_attempts: If True, every failed attempt is captured
            (non-final ones as WARNING and not sent to transport);
            otherwise only the final failure is captured.
        **metadata: Extra metadata merged into every captured event.

    Raises:
        ValueError: If max_retries is negative (raised at decoration time).
    """
    # FIX: fail fast here — a negative value previously skipped the retry
    # loop entirely and surfaced as a misleading "Unexpected state"
    # RuntimeError at call time.
    if max_retries < 0:
        raise ValueError(f"max_retries must be >= 0, got {max_retries}")

    def decorator(func: Callable[P, T]) -> Callable[P, T]:
        @functools.wraps(func)
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
            last_exception: Optional[Exception] = None
            current_delay = delay

            for attempt in range(max_retries + 1):
                try:
                    return func(*args, **kwargs)
                except Exception as e:
                    last_exception = e

                    # Determine if this is the final attempt
                    is_final = attempt == max_retries

                    # Capture this attempt if configured
                    if capture_all_attempts or is_final:
                        config = CaptureConfig(
                            component=component,
                            component_name=component_name,
                            pipeline_name=pipeline_name,
                            engine_name=engine_name,
                            severity=severity if not is_final else Severity.HIGH,
                            event_type=EventType.ERROR if is_final else EventType.WARNING,
                            reraise=False,  # Never reraise in capture_exception
                            log_to_console=True,
                            send_to_transport=is_final,  # Only send final failure
                            metadata={
                                'function': func.__name__,
                                'module': func.__module__,
                                'attempt': attempt + 1,
                                'max_retries': max_retries,
                                'is_final_attempt': is_final,
                                **metadata,
                            },
                        )
                        service = get_capture_service()
                        try:
                            service.capture_exception(e, config=config, skip_frames=1, async_send=True)
                        except Exception:
                            # Best-effort: capture must never mask the
                            # original error.
                            pass

                    # Sleep before retry (except on last attempt)
                    if not is_final:
                        time.sleep(current_delay)
                        current_delay *= backoff

            # All attempts failed: surface the last error to the caller.
            if last_exception:
                raise last_exception

            # Unreachable given max_retries >= 0, kept as a safety net.
            raise RuntimeError("Unexpected state in retry_with_capture")

        return wrapper  # type: ignore

    return decorator
|
|
242
|
+
|
|
243
|
+
|
|
244
|
+
def capture_method_errors(
    component: Optional[PipelineComponent] = None,
    severity: Severity = Severity.HIGH,
    reraise: bool = True,
) -> Callable[[type], type]:
    """
    Class decorator to wrap all public methods with error capture.

    Only plain functions (sync or async) defined directly on the class are
    wrapped. Names starting with '_' are skipped, as are staticmethod/
    classmethod/property descriptors and nested classes.

    FIX: the previous implementation iterated dir(cls) and fetched
    attributes via getattr(), which (a) re-wrapped inherited methods onto
    the subclass and (b) received already-bound classmethods / plain
    callables and re-assigned them as ordinary functions, silently breaking
    their descriptor binding.
    """

    def decorator(cls: type) -> type:
        # Snapshot the class's own attributes; setattr during iteration
        # would otherwise mutate the dict we are walking.
        for attr_name, attr in list(vars(cls).items()):
            if attr_name.startswith('_'):
                continue
            # Leave descriptors and nested classes untouched — re-wrapping
            # them would change how they bind on attribute access.
            if isinstance(attr, (staticmethod, classmethod, property, type)):
                continue
            if not callable(attr):
                continue

            if asyncio.iscoroutinefunction(attr):
                wrapped = async_capture_errors(
                    component=component,
                    component_name=cls.__name__,
                    severity=severity,
                    reraise=reraise,
                )(attr)
            else:
                wrapped = capture_errors(
                    component=component,
                    component_name=cls.__name__,
                    severity=severity,
                    reraise=reraise,
                )(attr)
            setattr(cls, attr_name, wrapped)

        return cls

    return decorator
|
|
File without changes
|