unrealon 1.1.6-py3-none-any.whl → 2.0.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {unrealon-1.1.6.dist-info/licenses → unrealon-2.0.5.dist-info}/LICENSE +1 -1
- unrealon-2.0.5.dist-info/METADATA +491 -0
- unrealon-2.0.5.dist-info/RECORD +128 -0
- {unrealon-1.1.6.dist-info → unrealon-2.0.5.dist-info}/WHEEL +2 -1
- unrealon-2.0.5.dist-info/entry_points.txt +3 -0
- unrealon-2.0.5.dist-info/top_level.txt +3 -0
- unrealon_browser/__init__.py +5 -6
- unrealon_browser/cli/browser_cli.py +18 -9
- unrealon_browser/cli/interactive_mode.py +13 -4
- unrealon_browser/core/browser_manager.py +29 -16
- unrealon_browser/dto/__init__.py +21 -0
- unrealon_browser/dto/bot_detection.py +175 -0
- unrealon_browser/dto/models/config.py +9 -3
- unrealon_browser/managers/__init__.py +1 -1
- unrealon_browser/managers/logger_bridge.py +1 -4
- unrealon_browser/stealth/__init__.py +27 -0
- unrealon_browser/stealth/bypass_techniques.pyc +0 -0
- unrealon_browser/stealth/manager.pyc +0 -0
- unrealon_browser/stealth/nodriver_stealth.pyc +0 -0
- unrealon_browser/stealth/playwright_stealth.pyc +0 -0
- unrealon_browser/stealth/scanner_tester.pyc +0 -0
- unrealon_browser/stealth/undetected_chrome.pyc +0 -0
- unrealon_core/__init__.py +172 -0
- unrealon_core/config/__init__.py +16 -0
- unrealon_core/config/environment.py +151 -0
- unrealon_core/config/urls.py +94 -0
- unrealon_core/enums/__init__.py +24 -0
- unrealon_core/enums/status.py +216 -0
- unrealon_core/enums/types.py +240 -0
- unrealon_core/error_handling/__init__.py +45 -0
- unrealon_core/error_handling/circuit_breaker.py +292 -0
- unrealon_core/error_handling/error_context.py +324 -0
- unrealon_core/error_handling/recovery.py +371 -0
- unrealon_core/error_handling/retry.py +268 -0
- unrealon_core/exceptions/__init__.py +46 -0
- unrealon_core/exceptions/base.py +292 -0
- unrealon_core/exceptions/communication.py +22 -0
- unrealon_core/exceptions/driver.py +11 -0
- unrealon_core/exceptions/proxy.py +11 -0
- unrealon_core/exceptions/task.py +12 -0
- unrealon_core/exceptions/validation.py +17 -0
- unrealon_core/models/__init__.py +79 -0
- unrealon_core/models/arq_context.py +252 -0
- unrealon_core/models/arq_responses.py +125 -0
- unrealon_core/models/base.py +291 -0
- unrealon_core/models/bridge_stats.py +58 -0
- unrealon_core/models/communication.py +39 -0
- unrealon_core/models/connection_stats.py +47 -0
- unrealon_core/models/driver.py +30 -0
- unrealon_core/models/driver_details.py +98 -0
- unrealon_core/models/logging.py +28 -0
- unrealon_core/models/task.py +21 -0
- unrealon_core/models/typed_responses.py +210 -0
- unrealon_core/models/websocket/__init__.py +91 -0
- unrealon_core/models/websocket/base.py +49 -0
- unrealon_core/models/websocket/config.py +200 -0
- unrealon_core/models/websocket/driver.py +215 -0
- unrealon_core/models/websocket/errors.py +138 -0
- unrealon_core/models/websocket/heartbeat.py +100 -0
- unrealon_core/models/websocket/logging.py +261 -0
- unrealon_core/models/websocket/proxy.py +496 -0
- unrealon_core/models/websocket/tasks.py +275 -0
- unrealon_core/models/websocket/utils.py +153 -0
- unrealon_core/models/websocket_session.py +144 -0
- unrealon_core/monitoring/__init__.py +43 -0
- unrealon_core/monitoring/alerts.py +398 -0
- unrealon_core/monitoring/dashboard.py +307 -0
- unrealon_core/monitoring/health_check.py +354 -0
- unrealon_core/monitoring/metrics.py +352 -0
- unrealon_core/utils/__init__.py +11 -0
- unrealon_core/utils/time.py +61 -0
- unrealon_core/version.py +219 -0
- unrealon_driver/__init__.py +90 -51
- unrealon_driver/core_module/__init__.py +34 -0
- unrealon_driver/core_module/base.py +184 -0
- unrealon_driver/core_module/config.py +30 -0
- unrealon_driver/core_module/event_manager.py +127 -0
- unrealon_driver/core_module/protocols.py +98 -0
- unrealon_driver/core_module/registry.py +146 -0
- unrealon_driver/decorators/__init__.py +15 -0
- unrealon_driver/decorators/retry.py +117 -0
- unrealon_driver/decorators/schedule.py +137 -0
- unrealon_driver/decorators/task.py +61 -0
- unrealon_driver/decorators/timing.py +132 -0
- unrealon_driver/driver/__init__.py +20 -0
- unrealon_driver/driver/communication/__init__.py +10 -0
- unrealon_driver/driver/communication/session.py +203 -0
- unrealon_driver/driver/communication/websocket_client.py +205 -0
- unrealon_driver/driver/core/__init__.py +10 -0
- unrealon_driver/driver/core/config.py +175 -0
- unrealon_driver/driver/core/driver.py +221 -0
- unrealon_driver/driver/factory/__init__.py +9 -0
- unrealon_driver/driver/factory/manager_factory.py +130 -0
- unrealon_driver/driver/lifecycle/__init__.py +11 -0
- unrealon_driver/driver/lifecycle/daemon.py +76 -0
- unrealon_driver/driver/lifecycle/initialization.py +97 -0
- unrealon_driver/driver/lifecycle/shutdown.py +48 -0
- unrealon_driver/driver/monitoring/__init__.py +9 -0
- unrealon_driver/driver/monitoring/health.py +63 -0
- unrealon_driver/driver/utilities/__init__.py +10 -0
- unrealon_driver/driver/utilities/logging.py +51 -0
- unrealon_driver/driver/utilities/serialization.py +61 -0
- unrealon_driver/managers/__init__.py +32 -0
- unrealon_driver/managers/base.py +174 -0
- unrealon_driver/managers/browser.py +98 -0
- unrealon_driver/managers/cache.py +116 -0
- unrealon_driver/managers/http.py +107 -0
- unrealon_driver/managers/logger.py +286 -0
- unrealon_driver/managers/proxy.py +99 -0
- unrealon_driver/managers/registry.py +87 -0
- unrealon_driver/managers/threading.py +54 -0
- unrealon_driver/managers/update.py +107 -0
- unrealon_driver/utils/__init__.py +9 -0
- unrealon_driver/utils/time.py +10 -0
- unrealon-1.1.6.dist-info/METADATA +0 -625
- unrealon-1.1.6.dist-info/RECORD +0 -55
- unrealon-1.1.6.dist-info/entry_points.txt +0 -9
- unrealon_browser/managers/stealth.py +0 -388
- unrealon_driver/README.md +0 -0
- unrealon_driver/exceptions.py +0 -33
- unrealon_driver/html_analyzer/__init__.py +0 -32
- unrealon_driver/html_analyzer/cleaner.py +0 -657
- unrealon_driver/html_analyzer/config.py +0 -64
- unrealon_driver/html_analyzer/manager.py +0 -247
- unrealon_driver/html_analyzer/models.py +0 -115
- unrealon_driver/html_analyzer/websocket_analyzer.py +0 -157
- unrealon_driver/models/__init__.py +0 -31
- unrealon_driver/models/websocket.py +0 -98
- unrealon_driver/parser/__init__.py +0 -36
- unrealon_driver/parser/cli_manager.py +0 -142
- unrealon_driver/parser/daemon_manager.py +0 -403
- unrealon_driver/parser/managers/__init__.py +0 -25
- unrealon_driver/parser/managers/config.py +0 -293
- unrealon_driver/parser/managers/error.py +0 -412
- unrealon_driver/parser/managers/result.py +0 -321
- unrealon_driver/parser/parser_manager.py +0 -458
- unrealon_driver/smart_logging/__init__.py +0 -24
- unrealon_driver/smart_logging/models.py +0 -44
- unrealon_driver/smart_logging/smart_logger.py +0 -406
- unrealon_driver/smart_logging/unified_logger.py +0 -525
- unrealon_driver/websocket/__init__.py +0 -31
- unrealon_driver/websocket/client.py +0 -249
- unrealon_driver/websocket/config.py +0 -188
- unrealon_driver/websocket/manager.py +0 -90

unrealon_driver/managers/cache.py
@@ -0,0 +1,116 @@
+"""
+Clean cache manager.
+"""
+
+import asyncio
+from typing import Any, Optional, Dict
+from datetime import datetime, timedelta
+from pydantic import Field
+
+from ..utils.time import utc_now
+from .base import BaseManager, ManagerConfig
+
+
+class CacheManagerConfig(ManagerConfig):
+    """Cache manager configuration."""
+    default_ttl: int = Field(default=3600, description="Default TTL seconds")
+    max_size: int = Field(default=1000, description="Max cache entries")
+
+
+class CacheEntry:
+    """Cache entry with TTL."""
+
+    def __init__(self, value: Any, ttl: int):
+        self.value = value
+        self.expires_at = utc_now() + timedelta(seconds=ttl)
+
+    def is_expired(self) -> bool:
+        """Check if entry is expired."""
+        return utc_now() > self.expires_at
+
+
+class CacheManager(BaseManager):
+    """Simple in-memory cache manager."""
+
+    def __init__(self, config: CacheManagerConfig):
+        super().__init__(config, "cache")
+        self.config: CacheManagerConfig = config
+        self._cache: Dict[str, CacheEntry] = {}
+        self._cleanup_task: Optional[asyncio.Task] = None
+
+    async def _initialize(self) -> bool:
+        """Initialize cache."""
+        # Start cleanup task
+        self._cleanup_task = asyncio.create_task(self._cleanup_expired())
+        return True
+
+    async def _shutdown(self):
+        """Shutdown cache."""
+        if self._cleanup_task:
+            self._cleanup_task.cancel()
+            try:
+                await self._cleanup_task
+            except asyncio.CancelledError:
+                pass
+
+        self._cache.clear()
+
+    async def _cleanup_expired(self):
+        """Background task to clean expired entries."""
+        while True:
+            try:
+                await asyncio.sleep(60)  # Cleanup every minute
+
+                expired_keys = []
+                for key, entry in self._cache.items():
+                    if entry.is_expired():
+                        expired_keys.append(key)
+
+                for key in expired_keys:
+                    del self._cache[key]
+
+            except asyncio.CancelledError:
+                break
+            except Exception as e:
+                self.logger.error(f"Cache cleanup error: {e}")
+
+    def get(self, key: str) -> Optional[Any]:
+        """Get value from cache."""
+        entry = self._cache.get(key)
+        if not entry:
+            return None
+
+        if entry.is_expired():
+            del self._cache[key]
+            return None
+
+        return entry.value
+
+    def set(self, key: str, value: Any, ttl: Optional[int] = None) -> None:
+        """Set value in cache."""
+        if len(self._cache) >= self.config.max_size:
+            # Remove oldest entry
+            oldest_key = next(iter(self._cache))
+            del self._cache[oldest_key]
+
+        ttl = ttl or self.config.default_ttl
+        self._cache[key] = CacheEntry(value, ttl)
+
+    def delete(self, key: str) -> bool:
+        """Delete key from cache."""
+        if key in self._cache:
+            del self._cache[key]
+            return True
+        return False
+
+    def clear(self) -> None:
+        """Clear all cache entries."""
+        self._cache.clear()
+
+    async def _health_check(self) -> Dict[str, Any]:
+        """Cache health check."""
+        return {
+            "status": "ok",
+            "entries": len(self._cache),
+            "max_size": self.config.max_size
+        }
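
A minimal usage sketch for the new `CacheManager` (not part of the diff). It assumes `BaseManager` exposes public `initialize()`/`shutdown()` wrappers around the `_initialize()`/`_shutdown()` hooks above, as the `ManagerRegistry` further down implies, and that `ManagerConfig`'s own fields have defaults; the keys and values are made up.

```python
import asyncio

from unrealon_driver.managers.cache import CacheManager, CacheManagerConfig


async def main() -> None:
    cache = CacheManager(CacheManagerConfig(default_ttl=60, max_size=100))
    if not await cache.initialize():            # starts the background cleanup task
        raise RuntimeError("cache init failed")

    cache.set("session", {"token": "abc"})      # uses default_ttl
    cache.set("page", "<html>...</html>", ttl=5)  # per-entry TTL override
    print(cache.get("session"))                 # -> {'token': 'abc'} until it expires

    await cache.shutdown()                      # cancels cleanup, clears entries


asyncio.run(main())
```

Note that when `max_size` is reached, `set()` evicts the oldest inserted key (Python dicts preserve insertion order), not the least recently used one, and expired entries are otherwise only swept once a minute.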

unrealon_driver/managers/http.py
@@ -0,0 +1,107 @@
+"""
+Clean HTTP manager for requests.
+"""
+
+import asyncio
+import aiohttp
+from typing import Dict, Any, Optional
+from pydantic import Field
+
+from .base import BaseManager, ManagerConfig
+
+
+class HttpManagerConfig(ManagerConfig):
+    """HTTP manager configuration."""
+    user_agent: str = Field(default="UnrealOn-Driver/1.0", description="User agent string")
+    max_connections: int = Field(default=100, description="Max concurrent connections")
+    connector_limit: int = Field(default=30, description="Connector limit per host")
+
+
+class HttpManager(BaseManager):
+    """Clean HTTP manager with aiohttp."""
+
+    def __init__(self, config: HttpManagerConfig):
+        super().__init__(config, "http")
+        self.config: HttpManagerConfig = config
+        self.session: Optional[aiohttp.ClientSession] = None
+
+    async def _initialize(self) -> bool:
+        """Initialize HTTP session."""
+        try:
+            # Create connector
+            connector = aiohttp.TCPConnector(
+                limit=self.config.max_connections,
+                limit_per_host=self.config.connector_limit,
+                ttl_dns_cache=300,
+                use_dns_cache=True
+            )
+
+            # Create session
+            timeout = aiohttp.ClientTimeout(total=self.config.timeout)
+            headers = {"User-Agent": self.config.user_agent}
+
+            self.session = aiohttp.ClientSession(
+                connector=connector,
+                timeout=timeout,
+                headers=headers
+            )
+
+            return True
+
+        except Exception as e:
+            self.logger.error(f"HTTP manager initialization failed: {e}")
+            return False
+
+    async def _shutdown(self):
+        """Shutdown HTTP session."""
+        if self.session:
+            await self.session.close()
+            self.session = None
+
+    async def get(self, url: str, **kwargs) -> aiohttp.ClientResponse:
+        """Make GET request."""
+        if not self.session:
+            raise RuntimeError("HTTP manager not initialized")
+
+        start_time = asyncio.get_event_loop().time()
+        success = False
+
+        try:
+            response = await self.session.get(url, **kwargs)
+            success = True
+            return response
+        finally:
+            duration = asyncio.get_event_loop().time() - start_time
+            self.stats.record_operation(success, duration)
+
+    async def post(self, url: str, **kwargs) -> aiohttp.ClientResponse:
+        """Make POST request."""
+        if not self.session:
+            raise RuntimeError("HTTP manager not initialized")
+
+        start_time = asyncio.get_event_loop().time()
+        success = False
+
+        try:
+            response = await self.session.post(url, **kwargs)
+            success = True
+            return response
+        finally:
+            duration = asyncio.get_event_loop().time() - start_time
+            self.stats.record_operation(success, duration)
+
+    async def request(self, method: str, url: str, **kwargs) -> aiohttp.ClientResponse:
+        """Make generic request."""
+        if not self.session:
+            raise RuntimeError("HTTP manager not initialized")
+
+        start_time = asyncio.get_event_loop().time()
+        success = False
+
+        try:
+            response = await self.session.request(method, url, **kwargs)
+            success = True
+            return response
+        finally:
+            duration = asyncio.get_event_loop().time() - start_time
+            self.stats.record_operation(success, duration)
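
A minimal usage sketch for `HttpManager` (not part of the diff). The `timeout` field belongs to `ManagerConfig`, which this diff does not show, and the URL is an example.

```python
import asyncio

from unrealon_driver.managers.http import HttpManager, HttpManagerConfig


async def main() -> None:
    http = HttpManager(HttpManagerConfig(timeout=30))
    await http.initialize()
    try:
        response = await http.get("https://example.com")
        body = await response.text()    # caller reads and releases the response
        print(response.status, len(body))
    finally:
        await http.shutdown()           # closes the aiohttp session


asyncio.run(main())
```

Because `get()`/`post()`/`request()` return the raw `aiohttp.ClientResponse`, the caller owns the response lifecycle; the manager only records success and duration around each call.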

unrealon_driver/managers/logger.py
@@ -0,0 +1,286 @@
+"""
+Clean logger manager with RPC batching and local fallback.
+"""
+
+import asyncio
+import logging
+import logging.handlers
+from datetime import datetime
+from typing import List, Optional, Dict, Any
+from pathlib import Path
+from collections import deque
+
+from pydantic import BaseModel, Field
+
+from unrealon_core.models.logging import LogEntryData, LogContext
+from unrealon_core.models.websocket.logging import LogBatchMessage, LogBatchData
+from unrealon_driver.utils.time import utc_now
+from .base import BaseManager, ManagerConfig
+
+logger = logging.getLogger(__name__)
+
+
+class LoggerManagerConfig(ManagerConfig):
+    """Logger manager configuration."""
+
+    # Local logging
+    log_file: Optional[str] = Field(default=None, description="Local log file path")
+    max_file_size: int = Field(default=10485760, description="Max log file size (10MB)")
+    backup_count: int = Field(default=5, description="Number of backup files")
+
+    # RPC batching
+    batch_size: int = Field(default=10, description="Logs per batch")
+    batch_timeout: float = Field(default=5.0, description="Max batch wait time")
+
+    # Driver info
+    driver_id: str = Field(..., description="Driver ID for logs")
+
+
+class LoggerManager(BaseManager):
+    """
+    Clean logger manager.
+
+    Features:
+    - Local file logging (always works)
+    - RPC batching to server (when available)
+    - Automatic fallback on RPC failure
+    - Clean batch processing
+    """
+
+    def __init__(self, config: LoggerManagerConfig, websocket_client=None):
+        super().__init__(config, "logger")
+        self.config: LoggerManagerConfig = config
+        self.websocket_client = websocket_client
+
+        # Local logger setup
+        self.local_logger = logging.getLogger(f"driver.{config.driver_id}")
+        self._setup_local_logging()
+
+        # RPC batching
+        self._log_batch: deque = deque()
+        self._batch_lock = asyncio.Lock()
+        self._batch_task: Optional[asyncio.Task] = None
+        self._running = False
+
+    def _setup_local_logging(self):
+        """Setup local file logging."""
+        if not self.config.log_file:
+            return
+
+        try:
+            # Create log directory if needed
+            log_path = Path(self.config.log_file)
+            log_path.parent.mkdir(parents=True, exist_ok=True)
+
+            # Setup rotating file handler
+            handler = logging.handlers.RotatingFileHandler(
+                self.config.log_file,
+                maxBytes=self.config.max_file_size,
+                backupCount=self.config.backup_count
+            )
+
+            # Set format
+            formatter = logging.Formatter(
+                '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+            )
+            handler.setFormatter(formatter)
+
+            # Add to logger
+            self.local_logger.addHandler(handler)
+            self.local_logger.setLevel(getattr(logging, self.config.log_level))
+
+        except Exception as e:
+            logger.error(f"Failed to setup local logging: {e}")
+
+    async def _initialize(self) -> bool:
+        """Initialize logger manager."""
+        try:
+            # Start batch processor
+            self._running = True
+            self._batch_task = asyncio.create_task(self._batch_processor())
+
+            logger.info("Logger manager initialized")
+            return True
+
+        except Exception as e:
+            logger.error(f"Logger manager initialization failed: {e}")
+            return False
+
+    async def _shutdown(self):
+        """Shutdown logger manager."""
+        try:
+            # Stop batch processor
+            self._running = False
+
+            if self._batch_task and not self._batch_task.done():
+                self._batch_task.cancel()
+                try:
+                    await self._batch_task
+                except asyncio.CancelledError:
+                    pass
+
+            # Send remaining logs
+            if self._log_batch:
+                await self._send_batch()
+
+            logger.info("Logger manager shutdown complete")
+
+        except Exception as e:
+            logger.error(f"Logger manager shutdown error: {e}")
+
+    async def _batch_processor(self):
+        """Background task to process log batches."""
+        while self._running:
+            try:
+                # Wait for batch timeout or until we have enough logs
+                await asyncio.sleep(self.config.batch_timeout)
+
+                async with self._batch_lock:
+                    if len(self._log_batch) >= self.config.batch_size:
+                        await self._send_batch()
+
+            except asyncio.CancelledError:
+                break
+            except Exception as e:
+                logger.error(f"Batch processor error: {e}")
+                await asyncio.sleep(1.0)
+
+    async def _send_batch(self):
+        """Send current batch via RPC."""
+        if not self._log_batch or not self.websocket_client:
+            return
+
+        try:
+            # Get logs from batch
+            logs_to_send = []
+            while self._log_batch and len(logs_to_send) < self.config.batch_size:
+                logs_to_send.append(self._log_batch.popleft())
+
+            if not logs_to_send:
+                return
+
+            # Create batch message
+            batch_data = LogBatchData(
+                driver_id=self.config.driver_id,
+                logs=logs_to_send,
+                batch_timestamp=utc_now().isoformat()
+            )
+            batch_message = LogBatchMessage(data=batch_data)
+
+            # Send via WebSocket
+            self.websocket_client.send(batch_message)
+
+            # Update stats
+            self.stats.record_operation(True, 0.0)
+
+        except Exception as e:
+            logger.error(f"Failed to send log batch: {e}")
+            # Put logs back in batch for retry
+            for log_entry in reversed(logs_to_send):
+                self._log_batch.appendleft(log_entry)
+
+            self.stats.record_operation(False, 0.0)
+
+    def log(self, level: str, message: str, context: Optional[Dict[str, Any]] = None):
+        """
+        Log message with both local and RPC.
+
+        Args:
+            level: Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
+            message: Log message
+            context: Additional context data
+        """
+        try:
+            # Always log locally first
+            self._log_local(level, message, context)
+
+            # Add to RPC batch if WebSocket available
+            if self.websocket_client:
+                self._add_to_batch(level, message, context)
+
+        except Exception as e:
+            logger.error(f"Logging failed: {e}")
+
+    def _log_local(self, level: str, message: str, context: Optional[Dict[str, Any]] = None):
+        """Log to local file."""
+        try:
+            # Format message with context
+            if context:
+                formatted_message = f"{message} | Context: {context}"
+            else:
+                formatted_message = message
+
+            # Log at appropriate level
+            log_level = getattr(logging, level.upper(), logging.INFO)
+            self.local_logger.log(log_level, formatted_message)
+
+        except Exception as e:
+            logger.error(f"Local logging failed: {e}")
+
+    def _add_to_batch(self, level: str, message: str, context: Optional[Dict[str, Any]] = None):
+        """Add log to RPC batch."""
+        try:
+            # Create log entry
+            log_context = LogContext()  # Use default empty context
+
+            log_entry = LogEntryData(
+                timestamp=utc_now().isoformat(),
+                level=level.upper(),
+                message=message,
+                driver_id=self.config.driver_id,
+                context=log_context
+            )
+
+            # Add to batch (thread-safe) - only if event loop is running
+            try:
+                loop = asyncio.get_running_loop()
+                loop.create_task(self._add_to_batch_async(log_entry))
+            except RuntimeError:
+                # No running event loop - add directly to batch
+                self._log_batch.append(log_entry)
+
+        except Exception as e:
+            logger.error(f"Failed to add log to batch: {e}")
+
+    async def _add_to_batch_async(self, log_entry: LogEntryData):
+        """Add log entry to batch asynchronously."""
+        try:
+            async with self._batch_lock:
+                self._log_batch.append(log_entry)
+
+                # Send immediately if batch is full
+                if len(self._log_batch) >= self.config.batch_size:
+                    await self._send_batch()
+
+        except Exception as e:
+            logger.error(f"Failed to add log to batch async: {e}")
+
+    # Convenience methods
+    def debug(self, message: str, context: Optional[Dict[str, Any]] = None):
+        """Log debug message."""
+        self.log("DEBUG", message, context)
+
+    def info(self, message: str, context: Optional[Dict[str, Any]] = None):
+        """Log info message."""
+        self.log("INFO", message, context)
+
+    def warning(self, message: str, context: Optional[Dict[str, Any]] = None):
+        """Log warning message."""
+        self.log("WARNING", message, context)
+
+    def error(self, message: str, context: Optional[Dict[str, Any]] = None):
+        """Log error message."""
+        self.log("ERROR", message, context)
+
+    def critical(self, message: str, context: Optional[Dict[str, Any]] = None):
+        """Log critical message."""
+        self.log("CRITICAL", message, context)
+
+    async def _health_check(self) -> Dict[str, Any]:
+        """Health check for logger."""
+        return {
+            "status": "ok",
+            "batch_size": len(self._log_batch),
+            "local_logging": self.config.log_file is not None,
+            "rpc_logging": self.websocket_client is not None
+        }
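
A minimal usage sketch for `LoggerManager` (not part of the diff), running in local-file mode with no WebSocket client; the driver id and log path are made up. When a `websocket_client` is supplied, each entry is also queued in the batch deque and flushed to the server once `batch_size` entries accumulate or every `batch_timeout` seconds, and a failed send puts the entries back for retry.

```python
import asyncio

from unrealon_driver.managers.logger import LoggerManager, LoggerManagerConfig


async def main() -> None:
    log_config = LoggerManagerConfig(
        driver_id="driver-001",           # required field
        log_file="logs/driver-001.log",   # enables the rotating file handler
        batch_size=20,
    )
    log = LoggerManager(log_config)       # websocket_client=None -> local logging only
    await log.initialize()

    log.info("driver started", context={"version": "2.0.5"})
    log.error("fetch failed", context={"url": "https://example.com"})

    await log.shutdown()                  # flushes any remaining batched entries


asyncio.run(main())
```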

unrealon_driver/managers/proxy.py
@@ -0,0 +1,99 @@
+"""
+Clean proxy manager.
+"""
+
+import asyncio
+import random
+from typing import List, Optional, Dict, Any
+from pydantic import Field
+
+from .base import BaseManager, ManagerConfig
+
+
+class ProxyManagerConfig(ManagerConfig):
+    """Proxy manager configuration."""
+    proxies: List[str] = Field(default_factory=list, description="List of proxy URLs")
+    rotation_interval: int = Field(default=300, description="Rotation interval seconds")
+    health_check_interval: int = Field(default=60, description="Health check interval")
+
+
+class ProxyManager(BaseManager):
+    """Simple proxy rotation manager."""
+
+    def __init__(self, config: ProxyManagerConfig):
+        super().__init__(config, "proxy")
+        self.config: ProxyManagerConfig = config
+        self.active_proxies: List[str] = []
+        self.current_proxy: Optional[str] = None
+        self._rotation_task: Optional[asyncio.Task] = None
+
+    async def _initialize(self) -> bool:
+        """Initialize proxy manager."""
+        self.active_proxies = self.config.proxies.copy()
+
+        if self.active_proxies:
+            self.current_proxy = random.choice(self.active_proxies)
+            # Start rotation task
+            self._rotation_task = asyncio.create_task(self._rotation_loop())
+
+        return True
+
+    async def _shutdown(self):
+        """Shutdown proxy manager."""
+        if self._rotation_task:
+            self._rotation_task.cancel()
+            try:
+                await self._rotation_task
+            except asyncio.CancelledError:
+                pass
+
+    async def _rotation_loop(self):
+        """Background proxy rotation."""
+        while True:
+            try:
+                await asyncio.sleep(self.config.rotation_interval)
+
+                if self.active_proxies:
+                    old_proxy = self.current_proxy
+                    self.current_proxy = random.choice(self.active_proxies)
+
+                    if old_proxy != self.current_proxy:
+                        self.logger.info(f"Rotated proxy: {old_proxy} -> {self.current_proxy}")
+
+            except asyncio.CancelledError:
+                break
+            except Exception as e:
+                self.logger.error(f"Proxy rotation error: {e}")
+
+    def get_proxy(self) -> Optional[str]:
+        """Get current proxy."""
+        return self.current_proxy
+
+    def get_proxy_dict(self) -> Optional[Dict[str, str]]:
+        """Get proxy as dict for requests."""
+        if not self.current_proxy:
+            return None
+
+        return {
+            "http": self.current_proxy,
+            "https": self.current_proxy
+        }
+
+    def mark_proxy_bad(self, proxy: str):
+        """Mark proxy as bad and remove from rotation."""
+        if proxy in self.active_proxies:
+            self.active_proxies.remove(proxy)
+            self.logger.warning(f"Removed bad proxy: {proxy}")
+
+            # Switch to new proxy if current is bad
+            if proxy == self.current_proxy and self.active_proxies:
+                self.current_proxy = random.choice(self.active_proxies)
+
+    async def _health_check(self) -> Dict[str, Any]:
+        """Proxy health check."""
+        return {
+            "status": "ok",
+            "current_proxy": self.current_proxy,
+            "active_proxies": len(self.active_proxies),
+            "total_proxies": len(self.config.proxies)
+        }
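
A minimal usage sketch for `ProxyManager` (not part of the diff); the proxy URLs and interval are examples.

```python
import asyncio

from unrealon_driver.managers.proxy import ProxyManager, ProxyManagerConfig


async def main() -> None:
    proxies = ProxyManager(ProxyManagerConfig(
        proxies=["http://10.0.0.1:8080", "http://10.0.0.2:8080"],
        rotation_interval=120,
    ))
    await proxies.initialize()

    print(proxies.get_proxy())        # current proxy URL, rotated in the background
    print(proxies.get_proxy_dict())   # {"http": ..., "https": ...} for requests-style clients

    proxies.mark_proxy_bad("http://10.0.0.1:8080")   # drop it and switch if it was current

    await proxies.shutdown()


asyncio.run(main())
```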

unrealon_driver/managers/registry.py
@@ -0,0 +1,87 @@
+"""
+Clean manager registry.
+"""
+
+import asyncio
+from typing import Dict, List, Any, Optional
+from .base import BaseManager
+
+
+class ManagerRegistry:
+    """
+    Clean registry for managing all driver managers.
+
+    Provides centralized lifecycle management and health monitoring.
+    """
+
+    def __init__(self):
+        self.managers: Dict[str, BaseManager] = {}
+        self._initialized = False
+
+    def register(self, manager: BaseManager):
+        """Register a manager."""
+        self.managers[manager.name] = manager
+
+    def get(self, name: str) -> Optional[BaseManager]:
+        """Get manager by name."""
+        return self.managers.get(name)
+
+    async def initialize_all(self) -> bool:
+        """Initialize all registered managers."""
+        if self._initialized:
+            return True
+
+        success_count = 0
+
+        for name, manager in self.managers.items():
+            try:
+                if await manager.initialize():
+                    success_count += 1
+                else:
+                    print(f"Manager {name} initialization failed")
+            except Exception as e:
+                print(f"Manager {name} initialization error: {e}")
+
+        self._initialized = success_count == len(self.managers)
+        return self._initialized
+
+    async def shutdown_all(self):
+        """Shutdown all managers."""
+        for name, manager in self.managers.items():
+            try:
+                await manager.shutdown()
+            except Exception as e:
+                print(f"Manager {name} shutdown error: {e}")
+
+        self._initialized = False
+
+    async def health_check_all(self) -> Dict[str, Any]:
+        """Get health status of all managers."""
+        health_data = {}
+
+        for name, manager in self.managers.items():
+            try:
+                health_data[name] = await manager.health_check()
+            except Exception as e:
+                health_data[name] = {
+                    "name": name,
+                    "status": "error",
+                    "error": str(e)
+                }
+
+        return {
+            "managers": health_data,
+            "total": len(self.managers),
+            "initialized": self._initialized
+        }
+
+    def get_ready_managers(self) -> List[str]:
+        """Get list of ready manager names."""
+        return [
+            name for name, manager in self.managers.items()
+            if manager.is_ready()
+        ]
+
+    def is_all_ready(self) -> bool:
+        """Check if all managers are ready."""
+        return all(manager.is_ready() for manager in self.managers.values())