iflow-mcp_enuno-unifi-mcp-server 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- iflow_mcp_enuno_unifi_mcp_server-0.2.1.dist-info/METADATA +1282 -0
- iflow_mcp_enuno_unifi_mcp_server-0.2.1.dist-info/RECORD +81 -0
- iflow_mcp_enuno_unifi_mcp_server-0.2.1.dist-info/WHEEL +4 -0
- iflow_mcp_enuno_unifi_mcp_server-0.2.1.dist-info/entry_points.txt +2 -0
- iflow_mcp_enuno_unifi_mcp_server-0.2.1.dist-info/licenses/LICENSE +201 -0
- src/__init__.py +3 -0
- src/__main__.py +6 -0
- src/api/__init__.py +5 -0
- src/api/client.py +727 -0
- src/api/site_manager_client.py +176 -0
- src/cache.py +483 -0
- src/config/__init__.py +5 -0
- src/config/config.py +321 -0
- src/main.py +2234 -0
- src/models/__init__.py +126 -0
- src/models/acl.py +41 -0
- src/models/backup.py +272 -0
- src/models/client.py +74 -0
- src/models/device.py +53 -0
- src/models/dpi.py +50 -0
- src/models/firewall_policy.py +123 -0
- src/models/firewall_zone.py +28 -0
- src/models/network.py +62 -0
- src/models/qos_profile.py +458 -0
- src/models/radius.py +141 -0
- src/models/reference_data.py +34 -0
- src/models/site.py +59 -0
- src/models/site_manager.py +120 -0
- src/models/topology.py +138 -0
- src/models/traffic_flow.py +137 -0
- src/models/traffic_matching_list.py +56 -0
- src/models/voucher.py +42 -0
- src/models/vpn.py +73 -0
- src/models/wan.py +48 -0
- src/models/zbf_matrix.py +49 -0
- src/resources/__init__.py +8 -0
- src/resources/clients.py +111 -0
- src/resources/devices.py +102 -0
- src/resources/networks.py +93 -0
- src/resources/site_manager.py +64 -0
- src/resources/sites.py +86 -0
- src/tools/__init__.py +25 -0
- src/tools/acls.py +328 -0
- src/tools/application.py +42 -0
- src/tools/backups.py +1173 -0
- src/tools/client_management.py +505 -0
- src/tools/clients.py +203 -0
- src/tools/device_control.py +325 -0
- src/tools/devices.py +354 -0
- src/tools/dpi.py +241 -0
- src/tools/dpi_tools.py +89 -0
- src/tools/firewall.py +417 -0
- src/tools/firewall_policies.py +430 -0
- src/tools/firewall_zones.py +515 -0
- src/tools/network_config.py +388 -0
- src/tools/networks.py +190 -0
- src/tools/port_forwarding.py +263 -0
- src/tools/qos.py +1070 -0
- src/tools/radius.py +763 -0
- src/tools/reference_data.py +107 -0
- src/tools/site_manager.py +466 -0
- src/tools/site_vpn.py +95 -0
- src/tools/sites.py +187 -0
- src/tools/topology.py +406 -0
- src/tools/traffic_flows.py +1062 -0
- src/tools/traffic_matching_lists.py +371 -0
- src/tools/vouchers.py +249 -0
- src/tools/vpn.py +76 -0
- src/tools/wans.py +30 -0
- src/tools/wifi.py +498 -0
- src/tools/zbf_matrix.py +326 -0
- src/utils/__init__.py +88 -0
- src/utils/audit.py +213 -0
- src/utils/exceptions.py +114 -0
- src/utils/helpers.py +159 -0
- src/utils/logger.py +105 -0
- src/utils/sanitize.py +244 -0
- src/utils/validators.py +160 -0
- src/webhooks/__init__.py +6 -0
- src/webhooks/handlers.py +196 -0
- src/webhooks/receiver.py +290 -0
@@ -0,0 +1,1062 @@
"""Traffic flow monitoring tools."""

import asyncio
import csv
import json
from collections.abc import AsyncGenerator
from datetime import datetime, timedelta, timezone
from io import StringIO
from typing import Any, Literal
from uuid import uuid4

from ..api.client import UniFiClient
from ..config import Settings
from ..models.traffic_flow import (
    BlockFlowAction,
    ClientFlowAggregation,
    ConnectionState,
    FlowRisk,
    FlowStatistics,
    FlowStreamUpdate,
    TrafficFlow,
)
from ..utils import audit_action, get_logger, validate_confirmation

logger = get_logger(__name__)


async def get_traffic_flows(
    site_id: str,
    settings: Settings,
    source_ip: str | None = None,
    destination_ip: str | None = None,
    protocol: str | None = None,
    application_id: str | None = None,
    time_range: str = "24h",
    limit: int | None = None,
    offset: int | None = None,
) -> list[dict]:
    """Retrieve real-time traffic flows.

    Args:
        site_id: Site identifier
        settings: Application settings
        source_ip: Filter by source IP
        destination_ip: Filter by destination IP
        protocol: Filter by protocol (tcp/udp/icmp)
        application_id: Filter by DPI application ID
        time_range: Time range for flows (1h, 6h, 12h, 24h, 7d, 30d)
        limit: Maximum number of flows to return
        offset: Number of flows to skip

    Returns:
        List of traffic flows
    """
    async with UniFiClient(settings) as client:
        logger.info(f"Retrieving traffic flows for site {site_id}")

        if not client.is_authenticated:
            await client.authenticate()

        params: dict[str, Any] = {"time_range": time_range}
        if source_ip:
            params["source_ip"] = source_ip
        if destination_ip:
            params["destination_ip"] = destination_ip
        if protocol:
            params["protocol"] = protocol
        if application_id:
            params["application_id"] = application_id
        if limit:
            params["limit"] = limit
        if offset:
            params["offset"] = offset

        try:
            response = await client.get(
                f"/integration/v1/sites/{site_id}/traffic/flows", params=params
            )
            data = response.get("data", [])
        except Exception as e:
            logger.warning(f"Traffic flows endpoint not available: {e}")
            return []

        return [TrafficFlow(**flow).model_dump() for flow in data]


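# Example (editor's illustration, not part of the released wheel): a small
# helper showing how get_traffic_flows above could be driven from application
# code. The site name "default" and the printed field names are assumptions
# based on the TrafficFlow model fields referenced in this module.
async def example_print_tcp_flows(settings: Settings, site_id: str = "default") -> None:
    """Print a short summary of recent TCP flows (illustrative sketch only)."""
    flows = await get_traffic_flows(site_id, settings, protocol="tcp", limit=20)
    for flow in flows:
        total = flow.get("bytes_sent", 0) + flow.get("bytes_received", 0)
        print(f'{flow.get("source_ip")} -> {flow.get("destination_ip")}: {total} bytes')

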
async def get_flow_statistics(site_id: str, settings: Settings, time_range: str = "24h") -> dict:
    """Get aggregate flow statistics.

    Args:
        site_id: Site identifier
        settings: Application settings
        time_range: Time range for statistics (1h, 6h, 12h, 24h, 7d, 30d)

    Returns:
        Flow statistics
    """
    async with UniFiClient(settings) as client:
        logger.info(f"Retrieving flow statistics for site {site_id}")

        if not client.is_authenticated:
            await client.authenticate()

        try:
            response = await client.get(
                f"/integration/v1/sites/{site_id}/traffic/flows/statistics",
                params={"time_range": time_range},
            )
            data = response.get("data", response)
        except Exception as e:
            logger.warning(f"Flow statistics endpoint not available: {e}")
            # Return empty statistics
            return FlowStatistics(  # type: ignore[no-any-return]
                site_id=site_id,
                time_range=time_range,
                total_flows=0,
                total_bytes_sent=0,
                total_bytes_received=0,
                total_bytes=0,
                total_packets_sent=0,
                total_packets_received=0,
                unique_sources=0,
                unique_destinations=0,
            ).model_dump()

        # Handle empty response (no traffic data)
        if not data or data == {}:
            logger.info(f"No flow statistics available for site {site_id}")
            return FlowStatistics(  # type: ignore[no-any-return]
                site_id=site_id,
                time_range=time_range,
                total_flows=0,
                total_bytes_sent=0,
                total_bytes_received=0,
                total_bytes=0,
                total_packets_sent=0,
                total_packets_received=0,
                unique_sources=0,
                unique_destinations=0,
            ).model_dump()

        return FlowStatistics(**data).model_dump()  # type: ignore[no-any-return]


async def get_traffic_flow_details(site_id: str, flow_id: str, settings: Settings) -> dict:
    """Get details for a specific traffic flow.

    Args:
        site_id: Site identifier
        flow_id: Flow identifier
        settings: Application settings

    Returns:
        Traffic flow details
    """
    async with UniFiClient(settings) as client:
        logger.info(f"Retrieving traffic flow {flow_id} for site {site_id}")

        if not client.is_authenticated:
            await client.authenticate()

        try:
            response = await client.get(f"/integration/v1/sites/{site_id}/traffic/flows/{flow_id}")
            data = response.get("data", response)
        except Exception as e:
            logger.warning(f"Traffic flow details endpoint not available: {e}")
            raise

        return TrafficFlow(**data).model_dump()  # type: ignore[no-any-return]


async def get_top_flows(
    site_id: str,
    settings: Settings,
    limit: int = 10,
    time_range: str = "24h",
    sort_by: str = "bytes",
) -> list[dict]:
    """Get top bandwidth-consuming flows.

    Args:
        site_id: Site identifier
        settings: Application settings
        limit: Number of top flows to return
        time_range: Time range for flows
        sort_by: Sort by field (bytes, packets, duration)

    Returns:
        List of top flows
    """
    async with UniFiClient(settings) as client:
        logger.info(f"Retrieving top flows for site {site_id}")

        if not client.is_authenticated:
            await client.authenticate()

        try:
            response = await client.get(
                f"/integration/v1/sites/{site_id}/traffic/flows/top",
                params={"limit": limit, "time_range": time_range, "sort_by": sort_by},
            )
            data = response.get("data", [])
        except Exception:
            # Fallback: get all flows and sort manually
            logger.info("Top flows endpoint not available, fetching all flows")
            flows = await get_traffic_flows(site_id, settings, time_range=time_range)
            # Sort by total bytes
            sorted_flows = sorted(
                flows,
                key=lambda x: x.get("bytes_sent", 0) + x.get("bytes_received", 0),
                reverse=True,
            )
            return sorted_flows[:limit]

        return [TrafficFlow(**flow).model_dump() for flow in data]


async def get_flow_risks(
    site_id: str,
    settings: Settings,
    time_range: str = "24h",
    min_risk_level: str | None = None,
) -> list[dict]:
    """Get risk assessment for flows.

    Args:
        site_id: Site identifier
        settings: Application settings
        time_range: Time range for flows
        min_risk_level: Minimum risk level to include (low/medium/high/critical)

    Returns:
        List of flows with risk assessments
    """
    async with UniFiClient(settings) as client:
        logger.info(f"Retrieving flow risks for site {site_id}")

        if not client.is_authenticated:
            await client.authenticate()

        params = {"time_range": time_range}
        if min_risk_level:
            params["min_risk_level"] = min_risk_level

        try:
            response = await client.get(
                f"/integration/v1/sites/{site_id}/traffic/flows/risks", params=params
            )
            data = response.get("data", [])
        except Exception:
            logger.warning("Flow risks endpoint not available")
            return []

        return [FlowRisk(**risk).model_dump() for risk in data]


async def get_flow_trends(
    site_id: str,
    settings: Settings,
    time_range: str = "7d",
    interval: str = "1h",
) -> list[dict]:
    """Get historical flow trends.

    Args:
        site_id: Site identifier
        settings: Application settings
        time_range: Time range for trends (default: 7d)
        interval: Time interval for data points (1h, 6h, 1d)

    Returns:
        List of trend data points
    """
    async with UniFiClient(settings) as client:
        logger.info(f"Retrieving flow trends for site {site_id}")

        if not client.is_authenticated:
            await client.authenticate()

        try:
            response = await client.get(
                f"/integration/v1/sites/{site_id}/traffic/flows/trends",
                params={"time_range": time_range, "interval": interval},
            )
            data = response.get("data", [])
        except Exception:
            logger.warning("Flow trends endpoint not available")
            return []

        return data  # type: ignore[no-any-return]


async def filter_traffic_flows(
    site_id: str,
    settings: Settings,
    filter_expression: str,
    time_range: str = "24h",
    limit: int | None = None,
) -> list[dict]:
    """Filter flows using a complex filter expression.

    Args:
        site_id: Site identifier
        settings: Application settings
        filter_expression: Filter expression (e.g., "bytes > 1000000 AND protocol = 'tcp'")
        time_range: Time range for flows
        limit: Maximum number of flows to return

    Returns:
        List of filtered traffic flows
    """
    async with UniFiClient(settings) as client:
        logger.info(
            f"Filtering traffic flows for site {site_id} with expression: {filter_expression}"
        )

        if not client.is_authenticated:
            await client.authenticate()

        params: dict[str, Any] = {"filter": filter_expression, "time_range": time_range}
        if limit:
            params["limit"] = limit

        try:
            response = await client.get(
                f"/integration/v1/sites/{site_id}/traffic/flows", params=params
            )
            data = response.get("data", [])
        except Exception:
            logger.warning("Filtered flows endpoint not available, using basic filtering")
            # Fallback to basic filtering
            flows = await get_traffic_flows(site_id, settings, time_range=time_range)
            # Simple filtering - in production, would use a proper query parser
            return flows[:limit] if limit else flows

        return [TrafficFlow(**flow).model_dump() for flow in data]


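# Example (editor's sketch, not from the packaged module): how the
# filter_expression parameter of filter_traffic_flows above might be used.
# The exact expression grammar accepted by the controller is an assumption
# taken from the docstring example ("bytes > 1000000 AND protocol = 'tcp'").
async def example_large_tcp_flows(settings: Settings, site_id: str = "default") -> list[dict]:
    """Return flows over roughly 1 MB on TCP for the last 24 hours (illustrative only)."""
    return await filter_traffic_flows(
        site_id,
        settings,
        filter_expression="bytes > 1000000 AND protocol = 'tcp'",
        time_range="24h",
        limit=50,
    )

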
async def stream_traffic_flows(
    site_id: str,
    settings: Settings,
    interval_seconds: int = 15,
    filter_expression: str | None = None,
) -> AsyncGenerator[dict, None]:
    """Stream real-time traffic flow updates.

    This function attempts to use WebSocket for real-time updates,
    falling back to polling if WebSocket is unavailable.

    Args:
        site_id: Site identifier
        settings: Application settings
        interval_seconds: Update interval in seconds (default: 15)
        filter_expression: Optional filter expression

    Yields:
        Flow stream updates with bandwidth rates
    """
    async with UniFiClient(settings) as client:
        logger.info(f"Starting traffic flow stream for site {site_id}")

        if not client.is_authenticated:
            await client.authenticate()

        # Track previous flows for rate calculation
        previous_flows: dict[str, TrafficFlow] = {}

        # Try WebSocket first (if available in future)
        # For now, use polling fallback
        logger.info(f"Using polling fallback with {interval_seconds}s interval")

        while True:
            try:
                # Get current flows
                params: dict[str, Any] = {}
                if filter_expression:
                    params["filter"] = filter_expression

                response = await client.get(
                    f"/integration/v1/sites/{site_id}/traffic/flows", params=params
                )
                data = response.get("data", [])

                current_time = datetime.now(timezone.utc).isoformat()

                for flow_data in data:
                    flow = TrafficFlow(**flow_data)
                    flow_id = flow.flow_id

                    # Determine update type
                    if flow_id in previous_flows:
                        update_type_str: Literal["new", "update", "closed"] = "update"
                        # Calculate bandwidth rate
                        prev_flow = previous_flows[flow_id]
                        bytes_diff = (flow.bytes_sent + flow.bytes_received) - (
                            prev_flow.bytes_sent + prev_flow.bytes_received
                        )
                        bandwidth_rate = {
                            "bps": bytes_diff * 8 / interval_seconds,
                            "upload_bps": (flow.bytes_sent - prev_flow.bytes_sent)
                            * 8
                            / interval_seconds,
                            "download_bps": (flow.bytes_received - prev_flow.bytes_received)
                            * 8
                            / interval_seconds,
                        }
                    else:
                        update_type_str = "new"
                        bandwidth_rate = None

                    # Create stream update
                    update = FlowStreamUpdate(
                        update_type=update_type_str,
                        flow=flow,
                        timestamp=current_time,
                        bandwidth_rate=bandwidth_rate,
                    )

                    yield update.model_dump()

                    # Update tracking
                    previous_flows[flow_id] = flow

                # Check for closed flows
                current_flow_ids = {flow.flow_id for flow in data}
                for prev_flow_id in list(previous_flows.keys()):
                    if prev_flow_id not in current_flow_ids:
                        closed_flow = previous_flows.pop(prev_flow_id)
                        closed_update_type: Literal["new", "update", "closed"] = "closed"
                        update = FlowStreamUpdate(
                            update_type=closed_update_type,
                            flow=closed_flow,
                            timestamp=current_time,
                            bandwidth_rate=None,
                        )
                        yield update.model_dump()

                # Wait for next interval
                await asyncio.sleep(interval_seconds)

            except Exception as e:
                logger.error(f"Error in flow streaming: {e}")
                await asyncio.sleep(interval_seconds)


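# Example (editor's sketch, not part of the released file): consuming the
# stream_traffic_flows async generator above. The keys read from each update
# ("update_type", "bandwidth_rate") mirror the FlowStreamUpdate model used in
# this module; the 60-second cutoff is an arbitrary illustration.
async def example_watch_flows(settings: Settings, site_id: str = "default") -> None:
    """Print streamed flow updates for roughly one minute (illustrative only)."""
    started = datetime.now(timezone.utc)
    async for update in stream_traffic_flows(site_id, settings, interval_seconds=15):
        rate = update.get("bandwidth_rate") or {}
        print(update["update_type"], rate.get("bps", 0.0))
        if (datetime.now(timezone.utc) - started).total_seconds() > 60:
            break

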
async def get_connection_states(
    site_id: str,
    settings: Settings,
    time_range: str = "1h",
) -> list[dict]:
    """Get connection states for all flows.

    Args:
        site_id: Site identifier
        settings: Application settings
        time_range: Time range for flows

    Returns:
        List of connection states
    """
    async with UniFiClient(settings) as client:
        logger.info(f"Retrieving connection states for site {site_id}")

        if not client.is_authenticated:
            await client.authenticate()

        # Get flows
        flows = await get_traffic_flows(site_id, settings, time_range=time_range)

        # Determine connection states
        states = []
        current_time = datetime.now(timezone.utc)

        for flow in flows:
            flow_obj = TrafficFlow(**flow)

            # Determine state based on end_time
            if flow_obj.end_time:
                state_val: Literal["active", "closed", "timed_out"] = "closed"
                termination_reason = "normal_closure"
            else:
                # Check if flow is timed out (no activity in last 5 minutes)
                last_seen = datetime.fromisoformat(flow_obj.start_time.replace("Z", "+00:00"))
                if (current_time - last_seen).total_seconds() > 300:
                    state_val = "timed_out"
                    termination_reason = "timeout"
                else:
                    state_val = "active"
                    termination_reason = None

            connection_state = ConnectionState(
                flow_id=flow_obj.flow_id,
                state=state_val,
                last_seen=flow_obj.end_time or flow_obj.start_time,
                total_duration=flow_obj.duration,
                termination_reason=termination_reason,
            )

            states.append(connection_state.model_dump())

        return states


async def get_client_flow_aggregation(
    site_id: str,
    client_mac: str,
    settings: Settings,
    time_range: str = "24h",
) -> dict:
    """Get aggregated flow data for a specific client.

    Args:
        site_id: Site identifier
        client_mac: Client MAC address
        settings: Application settings
        time_range: Time range for aggregation

    Returns:
        Client flow aggregation data
    """
    async with UniFiClient(settings) as client:
        logger.info(f"Retrieving flow aggregation for client {client_mac}")

        if not client.is_authenticated:
            await client.authenticate()

        # Get flows for this client
        flows = await get_traffic_flows(site_id, settings, time_range=time_range)
        client_flows = [f for f in flows if f.get("client_mac") == client_mac]

        # Get connection states
        states = await get_connection_states(site_id, settings, time_range=time_range)
        client_states = [
            s for s in states if any(f["flow_id"] == s["flow_id"] for f in client_flows)
        ]

        # Aggregate statistics
        total_bytes = sum(f.get("bytes_sent", 0) + f.get("bytes_received", 0) for f in client_flows)
        total_packets = sum(
            f.get("packets_sent", 0) + f.get("packets_received", 0) for f in client_flows
        )

        active_flows = len([s for s in client_states if s["state"] == "active"])
        closed_flows = len([s for s in client_states if s["state"] == "closed"])

        # Top applications
        app_bytes: dict[str, int] = {}
        for flow in client_flows:
            app_name = flow.get("application_name", "Unknown")
            app_bytes[app_name] = (
                app_bytes.get(app_name, 0)
                + flow.get("bytes_sent", 0)
                + flow.get("bytes_received", 0)
            )

        top_applications = [
            {"application": app, "bytes": bytes_val}
            for app, bytes_val in sorted(app_bytes.items(), key=lambda x: x[1], reverse=True)[:10]
        ]

        # Top destinations
        dest_bytes: dict[str, int] = {}
        for flow in client_flows:
            dest_ip = flow.get("destination_ip", "Unknown")
            dest_bytes[dest_ip] = (
                dest_bytes.get(dest_ip, 0)
                + flow.get("bytes_sent", 0)
                + flow.get("bytes_received", 0)
            )

        top_destinations = [
            {"destination_ip": dest, "bytes": bytes_val}
            for dest, bytes_val in sorted(dest_bytes.items(), key=lambda x: x[1], reverse=True)[:10]
        ]

        # Get client IP from first flow
        client_ip = client_flows[0].get("source_ip") if client_flows else None

        # Auth failures would come from a separate endpoint
        # For now, set to 0 as placeholder
        auth_failures = 0

        aggregation = ClientFlowAggregation(
            client_mac=client_mac,
            client_ip=client_ip,
            site_id=site_id,
            total_flows=len(client_flows),
            total_bytes=total_bytes,
            total_packets=total_packets,
            active_flows=active_flows,
            closed_flows=closed_flows,
            auth_failures=auth_failures,
            top_applications=top_applications,
            top_destinations=top_destinations,
        )

        return aggregation.model_dump()  # type: ignore[no-any-return]


async def block_flow_source_ip(
    site_id: str,
    flow_id: str,
    settings: Settings,
    duration: str = "permanent",
    expires_in_hours: int | None = None,
    confirm: bool = False,
    dry_run: bool = False,
) -> dict:
    """Block source IP address from a traffic flow.

    Args:
        site_id: Site identifier
        flow_id: Flow identifier to block
        settings: Application settings
        duration: Block duration ("permanent" or "temporary")
        expires_in_hours: Hours until expiration (for temporary blocks)
        confirm: Confirmation flag (required)
        dry_run: If True, validate but don't execute

    Returns:
        Block action result
    """
    validate_confirmation(confirm, "block flow source IP")

    async with UniFiClient(settings) as client:
        logger.info(f"Blocking source IP from flow {flow_id}")

        if not client.is_authenticated:
            await client.authenticate()

        # Get flow details
        flow_data = await get_traffic_flow_details(site_id, flow_id, settings)
        source_ip = flow_data.get("source_ip")

        if not source_ip:
            raise ValueError(f"No source IP found for flow {flow_id}")

        # Calculate expiration
        expires_at = None
        if duration == "temporary" and expires_in_hours:
            expires_at = (
                datetime.now(timezone.utc) + timedelta(hours=expires_in_hours)
            ).isoformat()

        # Create firewall rule to block this IP
        from .firewall import create_firewall_rule

        rule_name = f"Block_{source_ip}_{flow_id[:8]}"

        if dry_run:
            logger.info(f"[DRY RUN] Would block source IP {source_ip}")
            action_id = str(uuid4())
            return BlockFlowAction(  # type: ignore[no-any-return]
                action_id=action_id,
                block_type="source_ip",
                blocked_target=source_ip,
                rule_id=None,
                zone_id=None,
                duration=duration,
                expires_at=expires_at,
                created_at=datetime.now(timezone.utc).isoformat(),
            ).model_dump()

        # Create blocking rule
        rule_result = await create_firewall_rule(
            site_id=site_id,
            name=rule_name,
            action="drop",
            protocol="all",
            settings=settings,
            source=source_ip,
            enabled=True,
            confirm=True,
        )

        rule_id = rule_result.get("_id")
        action_id = str(uuid4())

        # Audit the action
        await audit_action(
            settings,
            action_type="block_flow_source_ip",
            resource_type="flow_block_action",
            resource_id=action_id,
            site_id=site_id,
            details={"flow_id": flow_id, "source_ip": source_ip, "rule_id": rule_id},
        )

        return BlockFlowAction(  # type: ignore[no-any-return]
            action_id=action_id,
            block_type="source_ip",
            blocked_target=source_ip,
            rule_id=rule_id,
            zone_id=None,
            duration=duration,
            expires_at=expires_at,
            created_at=datetime.now(timezone.utc).isoformat(),
        ).model_dump()


async def block_flow_destination_ip(
    site_id: str,
    flow_id: str,
    settings: Settings,
    duration: str = "permanent",
    expires_in_hours: int | None = None,
    confirm: bool = False,
    dry_run: bool = False,
) -> dict:
    """Block destination IP address from a traffic flow.

    Args:
        site_id: Site identifier
        flow_id: Flow identifier to block
        settings: Application settings
        duration: Block duration ("permanent" or "temporary")
        expires_in_hours: Hours until expiration (for temporary blocks)
        confirm: Confirmation flag (required)
        dry_run: If True, validate but don't execute

    Returns:
        Block action result
    """
    validate_confirmation(confirm, "block flow destination IP")

    async with UniFiClient(settings) as client:
        logger.info(f"Blocking destination IP from flow {flow_id}")

        if not client.is_authenticated:
            await client.authenticate()

        # Get flow details
        flow_data = await get_traffic_flow_details(site_id, flow_id, settings)
        destination_ip = flow_data.get("destination_ip")

        if not destination_ip:
            raise ValueError(f"No destination IP found for flow {flow_id}")

        # Calculate expiration
        expires_at = None
        if duration == "temporary" and expires_in_hours:
            expires_at = (
                datetime.now(timezone.utc) + timedelta(hours=expires_in_hours)
            ).isoformat()

        # Create firewall rule to block this IP
        from .firewall import create_firewall_rule

        rule_name = f"Block_{destination_ip}_{flow_id[:8]}"

        if dry_run:
            logger.info(f"[DRY RUN] Would block destination IP {destination_ip}")
            action_id = str(uuid4())
            return BlockFlowAction(  # type: ignore[no-any-return]
                action_id=action_id,
                block_type="destination_ip",
                blocked_target=destination_ip,
                rule_id=None,
                zone_id=None,
                duration=duration,
                expires_at=expires_at,
                created_at=datetime.now(timezone.utc).isoformat(),
            ).model_dump()

        # Create blocking rule
        rule_result = await create_firewall_rule(
            site_id=site_id,
            name=rule_name,
            action="drop",
            protocol="all",
            settings=settings,
            destination=destination_ip,
            enabled=True,
            confirm=True,
        )

        rule_id = rule_result.get("_id")
        action_id = str(uuid4())

        # Audit the action
        await audit_action(
            settings,
            action_type="block_flow_destination_ip",
            resource_type="flow_block_action",
            resource_id=action_id,
            site_id=site_id,
            details={"flow_id": flow_id, "destination_ip": destination_ip, "rule_id": rule_id},
        )

        return BlockFlowAction(  # type: ignore[no-any-return]
            action_id=action_id,
            block_type="destination_ip",
            blocked_target=destination_ip,
            rule_id=rule_id,
            zone_id=None,
            duration=duration,
            expires_at=expires_at,
            created_at=datetime.now(timezone.utc).isoformat(),
        ).model_dump()


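# Example (editor's sketch, not part of the packaged module): the confirm /
# dry_run pattern shared by the block_* helpers above. Running with
# dry_run=True returns the would-be BlockFlowAction without creating a
# firewall rule; confirm=True is still required by validate_confirmation.
async def example_preview_source_block(settings: Settings, site_id: str, flow_id: str) -> dict:
    """Preview a temporary 24-hour source-IP block without applying it (illustrative only)."""
    return await block_flow_source_ip(
        site_id,
        flow_id,
        settings,
        duration="temporary",
        expires_in_hours=24,
        confirm=True,
        dry_run=True,
    )

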
async def block_flow_application(
    site_id: str,
    flow_id: str,
    settings: Settings,
    use_zbf: bool = True,
    zone_id: str | None = None,
    confirm: bool = False,
    dry_run: bool = False,
) -> dict:
    """Block application identified in a traffic flow.

    Args:
        site_id: Site identifier
        flow_id: Flow identifier to block
        settings: Application settings
        use_zbf: Use Zone-Based Firewall if available (default: True)
        zone_id: Zone ID for ZBF blocking (optional)
        confirm: Confirmation flag (required)
        dry_run: If True, validate but don't execute

    Returns:
        Block action result
    """
    validate_confirmation(confirm, "block flow application")

    async with UniFiClient(settings) as client:
        logger.info(f"Blocking application from flow {flow_id}")

        if not client.is_authenticated:
            await client.authenticate()

        # Get flow details
        flow_data = await get_traffic_flow_details(site_id, flow_id, settings)
        application_id = flow_data.get("application_id")
        application_name = flow_data.get("application_name", "Unknown")

        if not application_id:
            raise ValueError(f"No application ID found for flow {flow_id}")

        action_id = str(uuid4())
        created_at = datetime.now(timezone.utc).isoformat()

        if dry_run:
            logger.info(f"[DRY RUN] Would block application {application_name} ({application_id})")
            return BlockFlowAction(  # type: ignore[no-any-return]
                action_id=action_id,
                block_type="application",
                blocked_target=application_id,
                rule_id=None,
                zone_id=zone_id if use_zbf else None,
                duration="permanent",
                expires_at=None,
                created_at=created_at,
            ).model_dump()

        rule_id = None
        result_zone_id = None

        # Try ZBF blocking first if requested
        if use_zbf:
            try:
                from .zbf_matrix import block_application_by_zone

                # If no zone specified, try to get a default zone
                if not zone_id:
                    from .firewall_zones import list_firewall_zones

                    zones = await list_firewall_zones(site_id, settings)
                    if zones:
                        zone_id = zones[0].get("id")

                if zone_id:
                    await block_application_by_zone(
                        site_id=site_id,
                        zone_id=zone_id,
                        application_id=application_id,
                        settings=settings,
                        action="block",
                        confirm=True,
                    )
                    result_zone_id = zone_id
                    logger.info(f"Blocked application using ZBF in zone {zone_id}")
            except Exception as e:
                logger.warning(f"ZBF blocking failed, falling back to traditional firewall: {e}")
                use_zbf = False

        # Fallback to traditional firewall rule
        if not use_zbf or not zone_id:
            from .firewall import create_firewall_rule

            rule_name = f"Block_App_{application_name}_{flow_id[:8]}"

            rule_result = await create_firewall_rule(
                site_id=site_id,
                name=rule_name,
                action="drop",
                protocol="all",
                settings=settings,
                enabled=True,
                confirm=True,
            )
            rule_id = rule_result.get("_id")

        # Audit the action
        await audit_action(
            settings,
            action_type="block_flow_application",
            resource_type="flow_block_action",
            resource_id=action_id,
            site_id=site_id,
            details={
                "flow_id": flow_id,
                "application_id": application_id,
                "application_name": application_name,
                "rule_id": rule_id,
                "zone_id": result_zone_id,
            },
        )

        return BlockFlowAction(  # type: ignore[no-any-return]
            action_id=action_id,
            block_type="application",
            blocked_target=application_id,
            rule_id=rule_id,
            zone_id=result_zone_id,
            duration="permanent",
            expires_at=None,
            created_at=created_at,
        ).model_dump()


async def export_traffic_flows(
    site_id: str,
    settings: Settings,
    export_format: str = "json",
    time_range: str = "24h",
    include_fields: list[str] | None = None,
    filter_expression: str | None = None,
    max_records: int | None = None,
) -> str:
    """Export traffic flows to a file format.

    Args:
        site_id: Site identifier
        settings: Application settings
        export_format: Export format ("json", "csv")
        time_range: Time range for export
        include_fields: Specific fields to include (None = all)
        filter_expression: Filter expression
        max_records: Maximum number of records

    Returns:
        Exported data as string
    """
    async with UniFiClient(settings) as client:
        logger.info(f"Exporting traffic flows in {export_format} format")

        if not client.is_authenticated:
            await client.authenticate()

        # Get flows based on filter
        if filter_expression:
            flows = await filter_traffic_flows(
                site_id, settings, filter_expression, time_range, max_records
            )
        else:
            flows = await get_traffic_flows(site_id, settings, time_range=time_range)
            if max_records:
                flows = flows[:max_records]

        # Filter fields if specified
        if include_fields:
            flows = [
                {field: flow.get(field) for field in include_fields if field in flow}
                for flow in flows
            ]

        # Export to requested format
        if export_format == "json":
            return json.dumps(flows, indent=2)

        elif export_format == "csv":
            if not flows:
                return ""

            output = StringIO()
            # Get all unique fields
            all_fields: set[str] = set()
            for flow in flows:
                all_fields.update(flow.keys())

            fieldnames = sorted(all_fields)
            writer = csv.DictWriter(output, fieldnames=fieldnames)
            writer.writeheader()
            writer.writerows(flows)

            return output.getvalue()

        else:
            raise ValueError(f"Unsupported export format: {export_format}")


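# Example (editor's sketch, not from the released wheel): writing the CSV
# produced by export_traffic_flows above to disk. The output path and the
# chosen include_fields are assumptions for illustration.
async def example_export_csv(settings: Settings, site_id: str = "default") -> None:
    """Save a 7-day CSV export of selected flow fields (illustrative only)."""
    csv_text = await export_traffic_flows(
        site_id,
        settings,
        export_format="csv",
        time_range="7d",
        include_fields=["source_ip", "destination_ip", "protocol", "bytes_sent", "bytes_received"],
    )
    with open("traffic_flows.csv", "w", encoding="utf-8") as handle:
        handle.write(csv_text)

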
async def get_flow_analytics(
    site_id: str,
    settings: Settings,
    time_range: str = "24h",
) -> dict:
    """Get comprehensive flow analytics.

    Args:
        site_id: Site identifier
        settings: Application settings
        time_range: Time range for analytics

    Returns:
        Comprehensive analytics data
    """
    async with UniFiClient(settings) as client:
        logger.info(f"Generating flow analytics for site {site_id}")

        if not client.is_authenticated:
            await client.authenticate()

        # Get flows and statistics
        flows = await get_traffic_flows(site_id, settings, time_range=time_range)
        statistics = await get_flow_statistics(site_id, settings, time_range)
        states = await get_connection_states(site_id, settings, time_range)

        # Additional analytics
        protocols: dict[str, int] = {}
        applications: dict[str, dict[str, int]] = {}

        for flow in flows:
            # Protocol distribution
            protocol = flow.get("protocol", "unknown")
            protocols[protocol] = protocols.get(protocol, 0) + 1

            # Application distribution
            app = flow.get("application_name", "Unknown")
            total_bytes = flow.get("bytes_sent", 0) + flow.get("bytes_received", 0)
            if app not in applications:
                applications[app] = {"count": 0, "bytes": 0}
            applications[app]["count"] += 1
            applications[app]["bytes"] += total_bytes

        # State distribution
        state_distribution: dict[str, int] = {}
        for state in states:
            state_type = state.get("state", "unknown")
            state_distribution[state_type] = state_distribution.get(state_type, 0) + 1

        return {
            "site_id": site_id,
            "time_range": time_range,
            "statistics": statistics,
            "protocol_distribution": protocols,
            "application_distribution": applications,
            "state_distribution": state_distribution,
            "total_flows": len(flows),
            "total_states": len(states),
        }
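

# Example (editor's sketch, not part of the packaged module): running the
# analytics helper above from a script entry point. Constructing Settings()
# from environment-provided credentials is an assumption based on src/config.
async def example_print_analytics(site_id: str = "default") -> None:
    """Dump 24-hour flow analytics as JSON (illustrative only)."""
    settings = Settings()
    analytics = await get_flow_analytics(site_id, settings, time_range="24h")
    print(json.dumps(analytics, indent=2, default=str))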