iflow-mcp_enuno-unifi-mcp-server 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- iflow_mcp_enuno_unifi_mcp_server-0.2.1.dist-info/METADATA +1282 -0
- iflow_mcp_enuno_unifi_mcp_server-0.2.1.dist-info/RECORD +81 -0
- iflow_mcp_enuno_unifi_mcp_server-0.2.1.dist-info/WHEEL +4 -0
- iflow_mcp_enuno_unifi_mcp_server-0.2.1.dist-info/entry_points.txt +2 -0
- iflow_mcp_enuno_unifi_mcp_server-0.2.1.dist-info/licenses/LICENSE +201 -0
- src/__init__.py +3 -0
- src/__main__.py +6 -0
- src/api/__init__.py +5 -0
- src/api/client.py +727 -0
- src/api/site_manager_client.py +176 -0
- src/cache.py +483 -0
- src/config/__init__.py +5 -0
- src/config/config.py +321 -0
- src/main.py +2234 -0
- src/models/__init__.py +126 -0
- src/models/acl.py +41 -0
- src/models/backup.py +272 -0
- src/models/client.py +74 -0
- src/models/device.py +53 -0
- src/models/dpi.py +50 -0
- src/models/firewall_policy.py +123 -0
- src/models/firewall_zone.py +28 -0
- src/models/network.py +62 -0
- src/models/qos_profile.py +458 -0
- src/models/radius.py +141 -0
- src/models/reference_data.py +34 -0
- src/models/site.py +59 -0
- src/models/site_manager.py +120 -0
- src/models/topology.py +138 -0
- src/models/traffic_flow.py +137 -0
- src/models/traffic_matching_list.py +56 -0
- src/models/voucher.py +42 -0
- src/models/vpn.py +73 -0
- src/models/wan.py +48 -0
- src/models/zbf_matrix.py +49 -0
- src/resources/__init__.py +8 -0
- src/resources/clients.py +111 -0
- src/resources/devices.py +102 -0
- src/resources/networks.py +93 -0
- src/resources/site_manager.py +64 -0
- src/resources/sites.py +86 -0
- src/tools/__init__.py +25 -0
- src/tools/acls.py +328 -0
- src/tools/application.py +42 -0
- src/tools/backups.py +1173 -0
- src/tools/client_management.py +505 -0
- src/tools/clients.py +203 -0
- src/tools/device_control.py +325 -0
- src/tools/devices.py +354 -0
- src/tools/dpi.py +241 -0
- src/tools/dpi_tools.py +89 -0
- src/tools/firewall.py +417 -0
- src/tools/firewall_policies.py +430 -0
- src/tools/firewall_zones.py +515 -0
- src/tools/network_config.py +388 -0
- src/tools/networks.py +190 -0
- src/tools/port_forwarding.py +263 -0
- src/tools/qos.py +1070 -0
- src/tools/radius.py +763 -0
- src/tools/reference_data.py +107 -0
- src/tools/site_manager.py +466 -0
- src/tools/site_vpn.py +95 -0
- src/tools/sites.py +187 -0
- src/tools/topology.py +406 -0
- src/tools/traffic_flows.py +1062 -0
- src/tools/traffic_matching_lists.py +371 -0
- src/tools/vouchers.py +249 -0
- src/tools/vpn.py +76 -0
- src/tools/wans.py +30 -0
- src/tools/wifi.py +498 -0
- src/tools/zbf_matrix.py +326 -0
- src/utils/__init__.py +88 -0
- src/utils/audit.py +213 -0
- src/utils/exceptions.py +114 -0
- src/utils/helpers.py +159 -0
- src/utils/logger.py +105 -0
- src/utils/sanitize.py +244 -0
- src/utils/validators.py +160 -0
- src/webhooks/__init__.py +6 -0
- src/webhooks/handlers.py +196 -0
- src/webhooks/receiver.py +290 -0
|
@@ -0,0 +1,176 @@
|
|
|
1
|
+
"""Site Manager API client for multi-site management."""
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
import httpx
|
|
6
|
+
|
|
7
|
+
from ..config import Settings
|
|
8
|
+
from ..utils import APIError, AuthenticationError, NetworkError, ResourceNotFoundError, get_logger
|
|
9
|
+
|
|
10
|
+
logger = get_logger(__name__)
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class SiteManagerClient:
    """Client for the UniFi Site Manager API (api.ui.com/v1/)."""

    def __init__(self, settings: Settings) -> None:
        """Initialize Site Manager API client.

        Args:
            settings: Application settings
        """
        self.settings = settings
        self.logger = get_logger(__name__, settings.log_level)

        # Host only.  Endpoint paths carry the "/v1/" prefix themselves (see
        # get()), and httpx merges base_url.path with the request path -- a
        # base_url of "https://api.ui.com/v1/" would therefore produce
        # "/v1/v1/..." request URLs.
        base_url = "https://api.ui.com"

        # Initialize HTTP client
        self.client = httpx.AsyncClient(
            base_url=base_url,
            headers=settings.get_headers(),
            timeout=settings.request_timeout,
            verify=True,  # Always verify SSL for Site Manager API
        )

        self._authenticated = False

    async def __aenter__(self) -> "SiteManagerClient":
        """Async context manager entry."""
        return self

    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
        """Async context manager exit."""
        await self.close()

    async def close(self) -> None:
        """Close the HTTP client."""
        await self.client.aclose()

    @property
    def is_authenticated(self) -> bool:
        """Check if client is authenticated.

        Returns:
            True if authenticated, False otherwise
        """
        return self._authenticated

    async def authenticate(self) -> None:
        """Authenticate with the Site Manager API.

        Raises:
            AuthenticationError: If authentication fails
        """
        try:
            # Test authentication with sites endpoint
            response = await self.client.get("/v1/sites")
            if response.status_code == 200:
                self._authenticated = True
                self.logger.info("Successfully authenticated with Site Manager API")
            else:
                raise AuthenticationError(f"Authentication failed: {response.status_code}")
        except AuthenticationError:
            # Re-raise our own error as-is instead of wrapping it a second
            # time (the broad handler below would otherwise nest it).
            raise
        except Exception as e:
            self.logger.error(f"Site Manager authentication failed: {e}")
            raise AuthenticationError(f"Failed to authenticate with Site Manager API: {e}") from e

    async def get(self, endpoint: str, params: dict[str, Any] | None = None) -> dict[str, Any]:
        """Make a GET request to Site Manager API.

        Args:
            endpoint: API endpoint path (without /v1/ prefix)
            params: Query parameters

        Returns:
            Response data as dictionary

        Raises:
            APIError: If API returns an error
            AuthenticationError: If authentication fails
            ResourceNotFoundError: If the endpoint returns 404
            NetworkError: If network communication fails
        """
        if not self._authenticated:
            await self.authenticate()

        try:
            # Ensure endpoint starts with /v1/
            if not endpoint.startswith("/v1/"):
                endpoint = f"/v1/{endpoint.lstrip('/')}"

            response = await self.client.get(endpoint, params=params)
            response.raise_for_status()

            return response.json()  # type: ignore[no-any-return]

        except httpx.HTTPStatusError as e:
            if e.response.status_code == 401:
                raise AuthenticationError("Site Manager API authentication failed") from e
            elif e.response.status_code == 404:
                raise ResourceNotFoundError("resource", endpoint) from e
            else:
                raise APIError(
                    message=f"Site Manager API error: {e.response.text}",
                    status_code=e.response.status_code,
                ) from e
        except httpx.NetworkError as e:
            raise NetworkError(f"Network communication failed: {e}") from e
        except Exception as e:
            self.logger.error(f"Unexpected error in Site Manager API request: {e}")
            raise APIError(f"Unexpected error: {e}") from e

    async def list_sites(
        self, limit: int | None = None, offset: int | None = None
    ) -> dict[str, Any]:
        """List all sites from Site Manager API.

        Args:
            limit: Maximum number of sites to return
            offset: Number of sites to skip

        Returns:
            Response with sites list
        """
        params: dict[str, Any] = {}
        # Explicit "is not None" so a caller-supplied 0 is still forwarded
        # (plain truthiness would silently drop it).
        if limit is not None:
            params["limit"] = limit
        if offset is not None:
            params["offset"] = offset

        return await self.get("sites", params=params)

    async def get_site_health(self, site_id: str | None = None) -> dict[str, Any]:
        """Get health metrics for a site or all sites.

        Args:
            site_id: Optional site identifier. If None, returns health for all sites.

        Returns:
            Health metrics
        """
        endpoint = "sites/health"
        if site_id:
            endpoint = f"sites/{site_id}/health"

        return await self.get(endpoint)

    async def get_internet_health(self, site_id: str | None = None) -> dict[str, Any]:
        """Get internet health metrics.

        Args:
            site_id: Optional site identifier. If None, returns aggregate internet health.

        Returns:
            Internet health metrics
        """
        endpoint = "internet/health"
        if site_id:
            endpoint = f"sites/{site_id}/internet/health"

        return await self.get(endpoint)

    async def list_vantage_points(self) -> dict[str, Any]:
        """List all Vantage Points.

        Returns:
            Response with Vantage Points list
        """
        return await self.get("vantage-points")
|
src/cache.py
ADDED
|
@@ -0,0 +1,483 @@
|
|
|
1
|
+
"""Redis-based caching for UniFi MCP Server.
|
|
2
|
+
|
|
3
|
+
This module provides caching capabilities to reduce API calls and improve performance.
|
|
4
|
+
Supports configurable TTL per resource type and graceful degradation if Redis is unavailable.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import json
|
|
8
|
+
import logging
|
|
9
|
+
from collections.abc import Callable
|
|
10
|
+
from functools import wraps
|
|
11
|
+
from typing import Any
|
|
12
|
+
|
|
13
|
+
try:
|
|
14
|
+
import redis.asyncio as redis
|
|
15
|
+
from redis.asyncio import Redis
|
|
16
|
+
from redis.exceptions import RedisError
|
|
17
|
+
|
|
18
|
+
REDIS_AVAILABLE = True
|
|
19
|
+
except ImportError:
|
|
20
|
+
REDIS_AVAILABLE = False
|
|
21
|
+
Redis = None
|
|
22
|
+
RedisError = Exception
|
|
23
|
+
|
|
24
|
+
from .config import Settings
|
|
25
|
+
from .utils import get_logger
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class CacheConfig:
    """Per-resource-type cache TTL configuration.

    Each class attribute is the time-to-live, in seconds, applied to cached
    entries of that resource type.  Unknown resource types fall back to a
    60-second TTL.
    """

    # Slow-changing configuration resources: 5 minutes.
    SITES: int = 300
    NETWORKS: int = 300
    WLANS: int = 300
    FIREWALL_RULES: int = 300
    PORT_FORWARDS: int = 300
    # Moderately dynamic resources.
    DPI_STATS: int = 120  # DPI stats update frequently
    DEVICES: int = 60  # devices change occasionally
    TOPOLOGY: int = 60  # topology can change
    # Time-sensitive resources: 30 seconds.
    CLIENTS: int = 30
    ALERTS: int = 30
    EVENTS: int = 30

    @classmethod
    def get_ttl(cls, resource_type: str) -> int:
        """Look up the TTL for a resource type.

        Args:
            resource_type: Resource type name (sites, devices, clients, ...),
                case-insensitive.

        Returns:
            TTL in seconds (60 if the type is not configured).
        """
        attr_name = resource_type.upper()
        return getattr(cls, attr_name, 60)
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
class CacheClient:
    """Async Redis-backed cache with graceful degradation.

    All operations are best-effort: if Redis is unavailable (package missing
    or server unreachable) the client silently becomes a no-op rather than
    failing the caller.
    """

    def __init__(
        self,
        settings: Settings,
        enabled: bool = True,
        logger: logging.Logger | None = None,
    ):
        """Initialize cache client.

        Args:
            settings: Application settings
            enabled: Enable/disable caching
            logger: Optional logger instance
        """
        self.settings = settings
        self.enabled = enabled and REDIS_AVAILABLE
        self.logger = logger or get_logger(__name__, settings.log_level)
        self._redis: Redis | None = None
        self._connected = False

        # Warn once when caching was requested but the redis package is absent.
        if enabled and not REDIS_AVAILABLE:
            self.logger.warning(
                "Redis not available (redis package not installed). "
                "Caching is disabled. Install with: pip install redis"
            )
            self.enabled = False

    async def connect(self) -> bool:
        """Connect to Redis.

        Returns:
            True if connected successfully, False otherwise
        """
        if not self.enabled:
            return False
        if self._connected and self._redis:
            # Already connected; reuse the existing connection.
            return True

        try:
            # Connection parameters come from settings when present,
            # otherwise sensible local defaults are used.
            host = getattr(self.settings, "redis_host", "localhost")
            port = getattr(self.settings, "redis_port", 6379)
            db = getattr(self.settings, "redis_db", 0)
            password = getattr(self.settings, "redis_password", None)

            self._redis = redis.Redis(
                host=host,
                port=port,
                db=db,
                password=password,
                decode_responses=True,
                socket_timeout=5.0,
                socket_connect_timeout=5.0,
            )

            # A ping verifies the server is actually reachable.
            await self._redis.ping()
            self._connected = True
            self.logger.info(f"Connected to Redis at {host}:{port}")
            return True

        except Exception as e:
            # Degrade gracefully: disable caching for the rest of the session.
            self.logger.warning(
                f"Failed to connect to Redis: {e}. Caching disabled for this session."
            )
            self._redis = None
            self._connected = False
            self.enabled = False
            return False

    async def disconnect(self) -> None:
        """Disconnect from Redis."""
        if not self._redis:
            return
        try:
            await self._redis.close()
            self.logger.info("Disconnected from Redis")
        except Exception as e:
            self.logger.error(f"Error disconnecting from Redis: {e}")
        finally:
            self._redis = None
            self._connected = False

    async def get(self, key: str) -> Any | None:
        """Get value from cache.

        Args:
            key: Cache key

        Returns:
            Cached value or None if not found/error
        """
        if not self.enabled or not self._redis:
            return None

        try:
            raw = await self._redis.get(key)
            if not raw:
                self.logger.debug(f"Cache MISS: {key}")
                return None
            self.logger.debug(f"Cache HIT: {key}")
            return json.loads(raw)
        except (RedisError, json.JSONDecodeError) as e:
            self.logger.error(f"Cache get error for key '{key}': {e}")
            return None

    async def set(self, key: str, value: Any, ttl: int | None = None) -> bool:
        """Set value in cache.

        Args:
            key: Cache key
            value: Value to cache (must be JSON serializable)
            ttl: Time to live in seconds (optional)

        Returns:
            True if successful, False otherwise
        """
        if not self.enabled or not self._redis:
            return False

        try:
            payload = json.dumps(value)
            # setex stores with expiry; plain set stores forever.
            if ttl:
                await self._redis.setex(key, ttl, payload)
            else:
                await self._redis.set(key, payload)
            self.logger.debug(f"Cache SET: {key} (TTL: {ttl}s)")
            return True
        except (RedisError, TypeError, ValueError) as e:
            self.logger.error(f"Cache set error for key '{key}': {e}")
            return False

    async def delete(self, key: str) -> bool:
        """Delete key from cache.

        Args:
            key: Cache key

        Returns:
            True if deleted, False otherwise
        """
        if not self.enabled or not self._redis:
            return False

        try:
            removed = await self._redis.delete(key)
            if removed:
                self.logger.debug(f"Cache DELETE: {key}")
            return bool(removed)
        except RedisError as e:
            self.logger.error(f"Cache delete error for key '{key}': {e}")
            return False

    async def delete_pattern(self, pattern: str) -> int:
        """Delete all keys matching a pattern.

        Args:
            pattern: Redis key pattern (e.g., "sites:*")

        Returns:
            Number of keys deleted
        """
        if not self.enabled or not self._redis:
            return 0

        try:
            # SCAN (not KEYS) so large keyspaces don't block the server.
            matched = [k async for k in self._redis.scan_iter(match=pattern)]
            if not matched:
                return 0
            removed: int = await self._redis.delete(*matched)
            self.logger.debug(f"Cache DELETE pattern '{pattern}': {removed} keys")
            return removed
        except RedisError as e:
            self.logger.error(f"Cache delete pattern error for '{pattern}': {e}")
            return 0

    async def clear(self) -> bool:
        """Clear all cache data.

        Returns:
            True if successful, False otherwise
        """
        if not self.enabled or not self._redis:
            return False

        try:
            await self._redis.flushdb()
            self.logger.info("Cache CLEARED")
            return True
        except RedisError as e:
            self.logger.error(f"Cache clear error: {e}")
            return False

    async def exists(self, key: str) -> bool:
        """Check if key exists in cache.

        Args:
            key: Cache key

        Returns:
            True if key exists, False otherwise
        """
        if not self.enabled or not self._redis:
            return False

        try:
            return bool(await self._redis.exists(key))
        except RedisError as e:
            self.logger.error(f"Cache exists error for key '{key}': {e}")
            return False

    def build_key(
        self,
        resource_type: str,
        site_id: str | None = None,
        resource_id: str | None = None,
        **kwargs: Any,
    ) -> str:
        """Build a cache key.

        Args:
            resource_type: Type of resource (sites, devices, clients, etc.)
            site_id: Optional site identifier
            resource_id: Optional resource identifier
            **kwargs: Additional key components

        Returns:
            Cache key string (colon-separated components)
        """
        segments = [resource_type]
        # Optional positional components, in fixed order.
        segments.extend(part for part in (site_id, resource_id) if part)
        # Extra components sorted by name for deterministic keys.
        segments.extend(
            f"{name}:{value}" for name, value in sorted(kwargs.items()) if value is not None
        )
        return ":".join(segments)
|
|
306
|
+
|
|
307
|
+
|
|
308
|
+
def cached(
    resource_type: str,
    ttl: int | None = None,
    key_builder: Callable[..., str] | None = None,
) -> Callable[[Callable], Callable]:
    """Decorator for caching async function results.

    Args:
        resource_type: Type of resource being cached
        ttl: Time to live in seconds (uses CacheConfig if not specified)
        key_builder: Optional custom key builder function

    Example:
        @cached(resource_type="sites", ttl=300)
        async def get_sites(settings: Settings):
            # Function implementation
            pass
    """

    def decorator(func: Callable) -> Callable:
        @wraps(func)
        async def wrapper(*args: Any, **kwargs: Any) -> Any:
            # Locate the Settings object among the arguments -- it carries
            # the Redis connection parameters.
            settings = None
            for arg in args:
                if isinstance(arg, Settings):
                    settings = arg
                    break
            if settings is None:
                settings = kwargs.get("settings")

            if settings is None:
                # No settings, can't use cache - call function directly
                return await func(*args, **kwargs)

            # One short-lived cache connection per call.
            cache = CacheClient(settings)
            await cache.connect()

            try:
                # Build cache key
                if key_builder:
                    cache_key = key_builder(*args, **kwargs)
                else:
                    # Default key: resource type + function name (+ site_id).
                    key_parts = [resource_type, func.__name__]
                    if "site_id" in kwargs:
                        key_parts.append(kwargs["site_id"])
                    cache_key = ":".join(str(p) for p in key_parts if p)

                # Try to get from cache
                cached_value = await cache.get(cache_key)
                if cached_value is not None:
                    return cached_value

                # Cache miss: call the wrapped function and store its result.
                result = await func(*args, **kwargs)

                cache_ttl = ttl if ttl is not None else CacheConfig.get_ttl(resource_type)
                await cache.set(cache_key, result, ttl=cache_ttl)

                return result
            finally:
                # BUG FIX: the original only disconnected on the success
                # paths, leaking the Redis connection whenever the wrapped
                # function (or key builder) raised.
                await cache.disconnect()

        return wrapper

    return decorator
|
|
377
|
+
|
|
378
|
+
|
|
379
|
+
async def warm_cache(settings: Settings) -> dict[str, int]:
    """Pre-populate cache with frequently accessed data.

    Args:
        settings: Application settings

    Returns:
        Dictionary with counts of warmed cache entries per resource type
    """
    from .api import UniFiClient

    logger = get_logger(__name__, settings.log_level)
    cache = CacheClient(settings)

    if not await cache.connect():
        logger.warning("Cache warming skipped - Redis not available")
        return {}

    warmed = {"sites": 0, "devices": 0, "networks": 0}
    # BUG FIX: initialize before the inner try.  If fetching /ea/sites
    # raised, the original left `sites` unbound and the device-warming loop
    # below crashed with a NameError (masked by the broad outer except).
    sites: list[dict[str, Any]] = []

    try:
        async with UniFiClient(settings) as client:
            await client.authenticate()

            # Warm sites cache
            try:
                response = await client.get("/ea/sites")
                sites = response.get("data", [])
                for site in sites:
                    site_id = site.get("id")
                    if site_id:
                        key = cache.build_key("sites", resource_id=site_id)
                        await cache.set(key, site, ttl=CacheConfig.SITES)
                        warmed["sites"] += 1
                logger.info(f"Warmed cache for {warmed['sites']} sites")
            except Exception as e:
                logger.error(f"Failed to warm sites cache: {e}")

            # Warm devices cache for each site
            for site in sites:
                site_id = site.get("id")
                if not site_id:
                    continue

                try:
                    response = await client.get(f"/ea/sites/{site_id}/devices")
                    devices = response.get("data", [])
                    key = cache.build_key("devices", site_id=site_id)
                    await cache.set(key, devices, ttl=CacheConfig.DEVICES)
                    warmed["devices"] += len(devices)
                except Exception as e:
                    logger.error(f"Failed to warm devices cache for site {site_id}: {e}")

        logger.info(f"Cache warming complete: {warmed}")

    except Exception as e:
        logger.error(f"Cache warming failed: {e}")

    finally:
        await cache.disconnect()

    return warmed
|
|
441
|
+
|
|
442
|
+
|
|
443
|
+
async def invalidate_cache(
    settings: Settings,
    resource_type: str | None = None,
    site_id: str | None = None,
) -> int:
    """Invalidate cache entries.

    Args:
        settings: Application settings
        resource_type: Optional resource type to invalidate (all if not specified)
        site_id: Optional site ID to invalidate (all sites if not specified)

    Returns:
        Number of cache entries invalidated (-1 when the whole cache was
        cleared, since the count is unknown)
    """
    logger = get_logger(__name__, settings.log_level)
    cache = CacheClient(settings)

    if not await cache.connect():
        logger.warning("Cache invalidation skipped - Redis not available")
        return 0

    try:
        if not resource_type and not site_id:
            # Clear all
            await cache.clear()
            logger.info("Invalidated all cache entries")
            return -1  # Unknown count

        # BUG FIX: build_key() produces keys with no trailing components
        # (e.g. "devices:site1" written by warm_cache), which the original
        # patterns "<type>:<site>:*" / "*:<site>:*" could never match.
        # Delete both the exact key form and keys with further components.
        if resource_type and site_id:
            patterns = [f"{resource_type}:{site_id}", f"{resource_type}:{site_id}:*"]
        elif resource_type:
            patterns = [resource_type, f"{resource_type}:*"]
        else:
            patterns = [f"*:{site_id}", f"*:{site_id}:*"]

        deleted = 0
        for pattern in patterns:
            deleted += await cache.delete_pattern(pattern)

        logger.info(f"Invalidated {deleted} cache entries (pattern: {', '.join(patterns)})")
        return deleted

    finally:
        await cache.disconnect()
|