iflow-mcp_enuno-unifi-mcp-server 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- iflow_mcp_enuno_unifi_mcp_server-0.2.1.dist-info/METADATA +1282 -0
- iflow_mcp_enuno_unifi_mcp_server-0.2.1.dist-info/RECORD +81 -0
- iflow_mcp_enuno_unifi_mcp_server-0.2.1.dist-info/WHEEL +4 -0
- iflow_mcp_enuno_unifi_mcp_server-0.2.1.dist-info/entry_points.txt +2 -0
- iflow_mcp_enuno_unifi_mcp_server-0.2.1.dist-info/licenses/LICENSE +201 -0
- src/__init__.py +3 -0
- src/__main__.py +6 -0
- src/api/__init__.py +5 -0
- src/api/client.py +727 -0
- src/api/site_manager_client.py +176 -0
- src/cache.py +483 -0
- src/config/__init__.py +5 -0
- src/config/config.py +321 -0
- src/main.py +2234 -0
- src/models/__init__.py +126 -0
- src/models/acl.py +41 -0
- src/models/backup.py +272 -0
- src/models/client.py +74 -0
- src/models/device.py +53 -0
- src/models/dpi.py +50 -0
- src/models/firewall_policy.py +123 -0
- src/models/firewall_zone.py +28 -0
- src/models/network.py +62 -0
- src/models/qos_profile.py +458 -0
- src/models/radius.py +141 -0
- src/models/reference_data.py +34 -0
- src/models/site.py +59 -0
- src/models/site_manager.py +120 -0
- src/models/topology.py +138 -0
- src/models/traffic_flow.py +137 -0
- src/models/traffic_matching_list.py +56 -0
- src/models/voucher.py +42 -0
- src/models/vpn.py +73 -0
- src/models/wan.py +48 -0
- src/models/zbf_matrix.py +49 -0
- src/resources/__init__.py +8 -0
- src/resources/clients.py +111 -0
- src/resources/devices.py +102 -0
- src/resources/networks.py +93 -0
- src/resources/site_manager.py +64 -0
- src/resources/sites.py +86 -0
- src/tools/__init__.py +25 -0
- src/tools/acls.py +328 -0
- src/tools/application.py +42 -0
- src/tools/backups.py +1173 -0
- src/tools/client_management.py +505 -0
- src/tools/clients.py +203 -0
- src/tools/device_control.py +325 -0
- src/tools/devices.py +354 -0
- src/tools/dpi.py +241 -0
- src/tools/dpi_tools.py +89 -0
- src/tools/firewall.py +417 -0
- src/tools/firewall_policies.py +430 -0
- src/tools/firewall_zones.py +515 -0
- src/tools/network_config.py +388 -0
- src/tools/networks.py +190 -0
- src/tools/port_forwarding.py +263 -0
- src/tools/qos.py +1070 -0
- src/tools/radius.py +763 -0
- src/tools/reference_data.py +107 -0
- src/tools/site_manager.py +466 -0
- src/tools/site_vpn.py +95 -0
- src/tools/sites.py +187 -0
- src/tools/topology.py +406 -0
- src/tools/traffic_flows.py +1062 -0
- src/tools/traffic_matching_lists.py +371 -0
- src/tools/vouchers.py +249 -0
- src/tools/vpn.py +76 -0
- src/tools/wans.py +30 -0
- src/tools/wifi.py +498 -0
- src/tools/zbf_matrix.py +326 -0
- src/utils/__init__.py +88 -0
- src/utils/audit.py +213 -0
- src/utils/exceptions.py +114 -0
- src/utils/helpers.py +159 -0
- src/utils/logger.py +105 -0
- src/utils/sanitize.py +244 -0
- src/utils/validators.py +160 -0
- src/webhooks/__init__.py +6 -0
- src/webhooks/handlers.py +196 -0
- src/webhooks/receiver.py +290 -0
src/tools/backups.py
ADDED
|
@@ -0,0 +1,1173 @@
|
|
|
1
|
+
"""Backup and restore operations MCP tools."""
|
|
2
|
+
|
|
3
|
+
import hashlib
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
from ..api import UniFiClient
|
|
9
|
+
from ..config import Settings
|
|
10
|
+
from ..utils import ValidationError, get_logger, log_audit, validate_confirmation, validate_site_id
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
async def trigger_backup(
    site_id: str,
    backup_type: str,
    settings: Settings,
    retention_days: int = 30,
    confirm: bool = False,
    dry_run: bool = False,
) -> dict[str, Any]:
    """Trigger a backup operation on the UniFi controller.

    This creates a new backup of the specified type. The backup process may take
    several minutes depending on the size of your configuration and number of devices.

    Args:
        site_id: Site identifier
        backup_type: Type of backup ("network" or "system")
            - "network": Network settings and device configurations only
            - "system": Complete OS, application, and device configurations
        settings: Application settings
        retention_days: Number of days to retain the backup (default: 30, -1 for indefinite)
        confirm: Confirmation flag (must be True to execute)
        dry_run: If True, validate but don't create the backup

    Returns:
        Backup operation result including download URL and metadata

    Raises:
        ValidationError: If confirm is not True or backup_type is invalid

    Example:
        ```python
        result = await trigger_backup(
            site_id="default",
            backup_type="network",
            retention_days=30,
            confirm=True,
            settings=settings
        )
        print(f"Backup created: {result['filename']}")
        print(f"Download from: {result['download_url']}")
        ```

    Note:
        - Network backups are faster and smaller (typically <10 MB)
        - System backups are comprehensive but larger (can be >100 MB)
        - After backup completes, use the download_url to retrieve the file
        - Backup files are named with timestamp: backup_YYYY-MM-DD_HH-MM-SS.unf
    """
    site_id = validate_site_id(site_id)
    validate_confirmation(confirm, "backup operation")
    logger = get_logger(__name__, settings.log_level)

    # Validate backup type before touching the controller.
    valid_types = ["network", "system"]
    if backup_type.lower() not in valid_types:
        raise ValidationError(f"Invalid backup_type '{backup_type}'. Must be one of: {valid_types}")

    # Retention must be -1 (keep indefinitely) or a positive day count; 0 is rejected.
    if retention_days < -1 or retention_days == 0:
        raise ValidationError("retention_days must be -1 (indefinite) or positive integer")

    parameters = {
        "site_id": site_id,
        "backup_type": backup_type,
        "retention_days": retention_days,
    }

    if dry_run:
        logger.info(
            f"DRY RUN: Would create {backup_type} backup for site '{site_id}' "
            f"with {retention_days} days retention"
        )
        log_audit(
            operation="trigger_backup",
            parameters=parameters,
            result="dry_run",
            site_id=site_id,
            dry_run=True,
        )
        return {
            "dry_run": True,
            "would_create": {
                "backup_type": backup_type,
                "retention_days": retention_days,
                "estimated_size": "10-100 MB" if backup_type == "system" else "<10 MB",
            },
        }

    try:
        async with UniFiClient(settings) as client:
            await client.authenticate()

            response = await client.trigger_backup(
                site_id=site_id,
                backup_type=backup_type,
                days=retention_days,
            )

            # Extract backup information from response
            # Response format: {"data": {"url": "/data/backup/filename.unf", "id": "..."}}
            backup_data = response.get("data", {})
            download_url = backup_data.get("url", "")
            backup_id = backup_data.get("id", "")

            # Extract filename from URL; fall back to a timestamped name when
            # the controller response carried no URL.
            filename = (
                download_url.split("/")[-1]
                if download_url
                else f"backup_{datetime.now().strftime('%Y%m%d_%H%M%S')}.unf"
            )

            result = {
                "backup_id": backup_id or filename.replace(".unf", ""),
                "filename": filename,
                "download_url": download_url,
                "backup_type": backup_type,
                "created_at": datetime.now().isoformat(),
                "retention_days": retention_days,
                "status": "completed",
            }

            # BUG FIX: previously this message logged the literal text "(unknown)"
            # instead of the created backup's filename.
            logger.info(
                f"Successfully created {backup_type} backup '{filename}' for site '{site_id}'"
            )
            log_audit(
                operation="trigger_backup",
                parameters=parameters,
                result="success",
                site_id=site_id,
            )

            return result

    except Exception as e:
        logger.error(f"Failed to create backup for site '{site_id}': {e}")
        log_audit(
            operation="trigger_backup",
            parameters=parameters,
            result="error",
            error=str(e),
            site_id=site_id,
        )
        raise
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
async def list_backups(
    site_id: str,
    settings: Settings,
) -> list[dict[str, Any]]:
    """List all available backups for a site.

    Queries the controller for every backup file it knows about and returns
    normalized metadata for each one: file size, creation timestamp, inferred
    type, and validity status.

    Args:
        site_id: Site identifier
        settings: Application settings

    Returns:
        List of backup metadata dictionaries

    Example:
        ```python
        backups = await list_backups(site_id="default", settings=settings)
        for backup in backups:
            print(f"{backup['filename']}: {backup['size_bytes']} bytes, "
                  f"created {backup['created_at']}")
        ```
    """
    site_id = validate_site_id(site_id)
    logger = get_logger(__name__, settings.log_level)

    def _normalize(raw: dict[str, Any]) -> dict[str, Any]:
        # Map one raw controller record onto the BackupMetadata shape,
        # tolerating the alternate key names different controller
        # versions appear to use.
        name = raw.get("filename", raw.get("name", ""))
        kind = raw.get("type", "")
        if not kind:
            # No explicit type: infer from extension (.unifi = system, .unf = network).
            kind = "SYSTEM" if name.endswith(".unifi") else "NETWORK"
        return {
            "backup_id": raw.get("id", name.replace(".unf", "").replace(".unifi", "")),
            "filename": name,
            "backup_type": kind,
            "created_at": raw.get("datetime", raw.get("created", "")),
            "size_bytes": raw.get("size", raw.get("filesize", 0)),
            "version": raw.get("version", ""),
            "is_valid": raw.get("valid", True),
            "cloud_synced": raw.get("cloud_backup", False),
        }

    async with UniFiClient(settings) as client:
        await client.authenticate()
        raw_entries = await client.list_backups(site_id=site_id)
        backups = [_normalize(entry) for entry in raw_entries]

    logger.info(f"Retrieved {len(backups)} backups for site '{site_id}'")
    return backups
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
async def get_backup_details(
    site_id: str,
    backup_filename: str,
    settings: Settings,
) -> dict[str, Any]:
    """Get detailed information about a specific backup.

    Looks the filename up in the full backup listing for the site; there is
    no dedicated per-backup endpoint, so this delegates to ``list_backups``.

    Args:
        site_id: Site identifier
        backup_filename: Backup filename (e.g., "backup_2025-01-29.unf")
        settings: Application settings

    Returns:
        Detailed backup metadata dictionary

    Raises:
        ResourceNotFoundError: If backup file is not found
    """
    site_id = validate_site_id(site_id)
    logger = get_logger(__name__, settings.log_level)

    # Fetch the complete listing and scan it for the requested filename.
    all_backups = await list_backups(site_id=site_id, settings=settings)
    match = next((b for b in all_backups if b["filename"] == backup_filename), None)

    if match is not None:
        logger.info(f"Retrieved details for backup '{backup_filename}' in site '{site_id}'")
        return match

    # Imported here to mirror the module's lazy-import convention for this exception.
    from ..utils import ResourceNotFoundError

    raise ResourceNotFoundError("backup", backup_filename)
|
|
255
|
+
|
|
256
|
+
|
|
257
|
+
async def download_backup(
    site_id: str,
    backup_filename: str,
    output_path: str,
    settings: Settings,
    verify_checksum: bool = True,
) -> dict[str, Any]:
    """Download a backup file to local storage.

    Fetches the named backup from the controller and writes it to
    ``output_path``, creating parent directories as needed. When
    ``verify_checksum`` is True, a SHA-256 digest of the downloaded bytes is
    computed and returned so the caller can record or compare it (no
    reference checksum is available from the controller to compare against
    here).

    Args:
        site_id: Site identifier
        backup_filename: Backup filename to download
        output_path: Local filesystem path to save the backup
        settings: Application settings
        verify_checksum: Whether to compute a SHA-256 checksum of the file

    Returns:
        Download result with file path and metadata

    Example:
        ```python
        result = await download_backup(
            site_id="default",
            backup_filename="backup_2025-01-29.unf",
            output_path="/backups/unifi_backup.unf",
            settings=settings
        )
        print(f"Downloaded to: {result['local_path']}")
        print(f"Size: {result['size_bytes']} bytes")
        print(f"Checksum: {result['checksum']}")
        ```
    """
    site_id = validate_site_id(site_id)
    logger = get_logger(__name__, settings.log_level)

    logger.info(f"Downloading backup '{backup_filename}' from site '{site_id}'")

    audit_params = {"site_id": site_id, "backup_filename": backup_filename}

    try:
        async with UniFiClient(settings) as client:
            await client.authenticate()

            # Pull the raw backup bytes from the controller.
            content = await client.download_backup(
                site_id=site_id,
                backup_filename=backup_filename,
            )

            # Persist to disk, creating any missing parent directories.
            destination = Path(output_path)
            destination.parent.mkdir(parents=True, exist_ok=True)
            destination.write_bytes(content)

            # Compute the digest only when requested.
            digest = hashlib.sha256(content).hexdigest() if verify_checksum else ""

            outcome = {
                "backup_filename": backup_filename,
                "local_path": str(destination.absolute()),
                "size_bytes": len(content),
                "checksum": digest if verify_checksum else None,
                "download_time": datetime.now().isoformat(),
            }

            logger.info(
                f"Successfully downloaded backup '{backup_filename}' to '{output_path}' "
                f"({len(content)} bytes)"
            )
            log_audit(
                operation="download_backup",
                parameters=audit_params,
                result="success",
                site_id=site_id,
            )

            return outcome

    except Exception as e:
        logger.error(f"Failed to download backup '{backup_filename}': {e}")
        log_audit(
            operation="download_backup",
            parameters=audit_params,
            result="error",
            error=str(e),
            site_id=site_id,
        )
        raise
|
|
350
|
+
|
|
351
|
+
|
|
352
|
+
async def delete_backup(
    site_id: str,
    backup_filename: str,
    settings: Settings,
    confirm: bool = False,
    dry_run: bool = False,
) -> dict[str, Any]:
    """Delete a backup file from the controller.

    Permanently removes a backup file from the UniFi controller storage.
    This operation cannot be undone, so both a confirmation flag and a
    dry-run mode are supported.

    Args:
        site_id: Site identifier
        backup_filename: Backup filename to delete
        settings: Application settings
        confirm: Confirmation flag (must be True to execute)
        dry_run: If True, validate but don't delete the backup

    Returns:
        Deletion result

    Raises:
        ValidationError: If confirm is not True

    Example:
        ```python
        result = await delete_backup(
            site_id="default",
            backup_filename="old_backup_2024-01-01.unf",
            confirm=True,
            settings=settings
        )
        print(f"Deleted: {result['backup_filename']}")
        ```

    Warning:
        This operation permanently deletes the backup file.
        Ensure you have downloaded or don't need the backup before deleting.
    """
    site_id = validate_site_id(site_id)
    validate_confirmation(confirm, "backup deletion")
    logger = get_logger(__name__, settings.log_level)

    audit_params = {
        "site_id": site_id,
        "backup_filename": backup_filename,
    }

    # Dry-run short-circuits before any controller interaction.
    if dry_run:
        logger.info(f"DRY RUN: Would delete backup '{backup_filename}' from site '{site_id}'")
        log_audit(
            operation="delete_backup",
            parameters=audit_params,
            result="dry_run",
            site_id=site_id,
            dry_run=True,
        )
        return {"dry_run": True, "would_delete": backup_filename}

    try:
        async with UniFiClient(settings) as client:
            await client.authenticate()
            await client.delete_backup(
                site_id=site_id,
                backup_filename=backup_filename,
            )

        logger.info(f"Successfully deleted backup '{backup_filename}' from site '{site_id}'")
        log_audit(
            operation="delete_backup",
            parameters=audit_params,
            result="success",
            site_id=site_id,
        )

        return {
            "backup_filename": backup_filename,
            "status": "deleted",
            "deleted_at": datetime.now().isoformat(),
        }

    except Exception as e:
        logger.error(f"Failed to delete backup '{backup_filename}': {e}")
        log_audit(
            operation="delete_backup",
            parameters=audit_params,
            result="error",
            error=str(e),
            site_id=site_id,
        )
        raise
|
|
445
|
+
|
|
446
|
+
|
|
447
|
+
async def restore_backup(
    site_id: str,
    backup_filename: str,
    settings: Settings,
    create_pre_restore_backup: bool = True,
    confirm: bool = False,
    dry_run: bool = False,
) -> dict[str, Any]:
    """Restore the UniFi controller from a backup file.

    This is a DESTRUCTIVE operation that will restore the controller to the state
    captured in the backup. The controller may restart during the restore process.

    Safety features:
    - Automatic pre-restore backup creation (enabled by default)
    - Mandatory confirmation flag
    - Dry-run mode for validation
    - Audit logging

    Args:
        site_id: Site identifier
        backup_filename: Backup filename to restore from
        settings: Application settings
        create_pre_restore_backup: Create automatic backup before restore (recommended)
        confirm: Confirmation flag (must be True to execute)
        dry_run: If True, validate but don't restore

    Returns:
        Restore operation result including pre-restore backup info

    Raises:
        ValidationError: If confirm is not True

    Example:
        ```python
        # ALWAYS use confirm=True for restore operations
        result = await restore_backup(
            site_id="default",
            backup_filename="backup_2025-01-29.unf",
            create_pre_restore_backup=True,  # Create safety backup first
            confirm=True,
            settings=settings
        )
        print(f"Restore initiated. Pre-restore backup: {result['pre_restore_backup_id']}")
        ```

    Warning:
        This operation will:
        1. Restore all configuration from the backup
        2. May overwrite current settings
        3. May cause controller restart
        4. May temporarily disconnect devices

        ALWAYS create a pre-restore backup (enabled by default) so you can
        rollback if needed.
    """
    site_id = validate_site_id(site_id)
    # The confirmation message is intentionally loud: this overwrites live config.
    validate_confirmation(confirm, "RESTORE operation - this will OVERWRITE current configuration")
    logger = get_logger(__name__, settings.log_level)

    # Captured once so the same parameter set is used in every audit entry below.
    parameters = {
        "site_id": site_id,
        "backup_filename": backup_filename,
        "create_pre_restore_backup": create_pre_restore_backup,
    }

    if dry_run:
        # Dry-run returns before any controller interaction or safety backup.
        logger.info(f"DRY RUN: Would restore from backup '{backup_filename}' for site '{site_id}'")
        log_audit(
            operation="restore_backup",
            parameters=parameters,
            result="dry_run",
            site_id=site_id,
            dry_run=True,
        )
        return {
            "dry_run": True,
            "would_restore_from": backup_filename,
            "would_create_pre_restore_backup": create_pre_restore_backup,
            "warning": "Controller will restart during restore",
        }

    try:
        async with UniFiClient(settings) as client:
            await client.authenticate()

            # Create pre-restore backup if requested.
            # NOTE: this MUST happen before client.restore_backup() below — once the
            # restore starts, the current configuration can no longer be captured.
            pre_restore_backup_id = None
            if create_pre_restore_backup:
                logger.info("Creating pre-restore backup for safety...")
                # Delegates to trigger_backup in this module; a "network" backup is
                # used for the safety snapshot (faster/smaller than "system").
                pre_restore_result = await trigger_backup(
                    site_id=site_id,
                    backup_type="network",
                    retention_days=7,  # Keep for 7 days
                    confirm=True,
                    settings=settings,
                )
                pre_restore_backup_id = pre_restore_result["backup_id"]
                logger.info(f"Pre-restore backup created: {pre_restore_backup_id}")

            # Perform restore
            logger.warning(
                f"INITIATING RESTORE from '{backup_filename}' for site '{site_id}'. "
                "Controller may restart."
            )

            restore_response = await client.restore_backup(
                site_id=site_id,
                backup_filename=backup_filename,
            )

            # "restore_initiated" (not "completed"): the controller may still be
            # applying the backup / restarting after this call returns.
            result = {
                "backup_filename": backup_filename,
                "status": "restore_initiated",
                "pre_restore_backup_id": pre_restore_backup_id,
                "can_rollback": pre_restore_backup_id is not None,
                "restore_time": datetime.now().isoformat(),
                "warning": "Controller may restart. Devices may temporarily disconnect.",
                "restore_response": restore_response,
            }

            logger.warning(
                f"Restore initiated from '{backup_filename}'. "
                f"Pre-restore backup: {pre_restore_backup_id or 'None'}"
            )
            log_audit(
                operation="restore_backup",
                parameters=parameters,
                result="success",
                site_id=site_id,
            )

            return result

    except Exception as e:
        # A failure here can mean either the safety backup or the restore itself
        # failed; the audit record carries the error text for diagnosis.
        logger.error(f"Failed to restore from backup '{backup_filename}': {e}")
        log_audit(
            operation="restore_backup",
            parameters=parameters,
            result="error",
            error=str(e),
            site_id=site_id,
        )
        raise
|
|
591
|
+
|
|
592
|
+
|
|
593
|
+
async def validate_backup(
    site_id: str,
    backup_filename: str,
    settings: Settings,
) -> dict[str, Any]:
    """Validate a backup file before restore.

    Runs lightweight integrity checks against the backup's listed metadata
    (size, controller validity flag, version presence). A deep content check
    would require downloading the file, which this tool does not do.

    Args:
        site_id: Site identifier
        backup_filename: Backup filename to validate
        settings: Application settings

    Returns:
        Validation result with details and warnings

    Example:
        ```python
        validation = await validate_backup(
            site_id="default",
            backup_filename="backup_2025-01-29.unf",
            settings=settings
        )
        if validation['is_valid']:
            print("Backup is valid and ready to restore")
        else:
            print(f"Validation errors: {validation['errors']}")
        ```
    """
    site_id = validate_site_id(site_id)
    logger = get_logger(__name__, settings.log_level)

    try:
        details = await get_backup_details(
            site_id=site_id,
            backup_filename=backup_filename,
            settings=settings,
        )

        warnings: list[str] = []
        errors: list[str] = []

        # Size sanity checks: zero bytes is fatal, under 1 KB is suspicious.
        size_bytes = details.get("size_bytes", 0)
        if size_bytes == 0:
            errors.append("Backup file appears to be empty")
        elif size_bytes < 1024:
            warnings.append("Backup file is unusually small")

        # Respect the controller's own validity flag.
        if not details.get("is_valid", True):
            errors.append("Backup is marked as invalid by controller")

        # True compatibility checking would need the file contents; here we can
        # only warn when no version string was reported at all.
        backup_version = details.get("version", "")
        if not backup_version:
            warnings.append("Backup version unknown - cannot verify compatibility")

        is_valid = not errors

        report = {
            "backup_id": details.get("backup_id", ""),
            "backup_filename": backup_filename,
            "is_valid": is_valid,
            "checksum_valid": True,  # Assumed true if controller lists it
            "format_valid": is_valid,
            "version_compatible": is_valid,  # no error => treated as compatible
            "backup_version": backup_version,
            "warnings": warnings,
            "errors": errors,
            "size_bytes": size_bytes,
            "validated_at": datetime.now().isoformat(),
        }

        logger.info(
            f"Validated backup '{backup_filename}': "
            f"{'VALID' if is_valid else 'INVALID'} "
            f"({len(warnings)} warnings, {len(errors)} errors)"
        )

        return report

    except Exception as e:
        # Lookup failures (e.g. backup not found) are reported as an invalid
        # result rather than raised, so callers get a uniform shape.
        logger.error(f"Failed to validate backup '{backup_filename}': {e}")
        return {
            "backup_filename": backup_filename,
            "is_valid": False,
            "errors": [str(e)],
            "validated_at": datetime.now().isoformat(),
        }
|
|
688
|
+
|
|
689
|
+
|
|
690
|
+
async def get_backup_status(
    operation_id: str,
    settings: Settings,
) -> dict[str, Any]:
    """Get the status of an ongoing or completed backup operation.

    Monitor the progress of a backup operation initiated with trigger_backup.
    Useful for tracking long-running system backups.

    Args:
        operation_id: Backup operation identifier (returned by trigger_backup)
        settings: Application settings

    Returns:
        Backup operation status including progress and result

    Example:
        ```python
        # Start backup
        backup_result = await trigger_backup(...)
        operation_id = backup_result['operation_id']

        # Poll for status
        while True:
            status = await get_backup_status(
                operation_id=operation_id,
                settings=settings
            )
            if status['status'] in ['completed', 'failed']:
                break
            await asyncio.sleep(5)  # Wait 5 seconds before checking again
        ```

    Note:
        Most backup operations complete quickly (<30 seconds for network backups,
        1-3 minutes for system backups). This tool is primarily useful for
        very large deployments or system backups.
    """
    logger = get_logger(__name__, settings.log_level)

    try:
        async with UniFiClient(settings) as client:
            await client.authenticate()

            # The UniFi API may not expose a dedicated status endpoint; backups
            # normally complete synchronously inside trigger_backup. This call
            # keeps the interface consistent and falls back below if the client
            # lacks the method.
            raw = await client.get_backup_status(operation_id=operation_id)

        report = {
            "operation_id": operation_id,
            "status": raw.get("status", "completed"),
            "progress_percent": raw.get("progress", 100),
            "current_step": raw.get("step", "Completed"),
            "started_at": raw.get("started_at", ""),
            "completed_at": raw.get("completed_at", ""),
            "backup_metadata": raw.get("backup", {}),
            "error_message": raw.get("error", None),
        }

        logger.info(
            f"Retrieved status for backup operation '{operation_id}': {report['status']}"
        )
        return report

    except AttributeError:
        # Graceful degradation when the API client has no get_backup_status method.
        logger.warning(
            f"Backup status API not available. Operation '{operation_id}' status unknown."
        )
        return {
            "operation_id": operation_id,
            "status": "completed",  # Assume completed since backups are synchronous
            "progress_percent": 100,
            "message": "Backup operations complete synchronously. Status tracking not available.",
        }
    except Exception as e:
        logger.error(f"Failed to get backup status for '{operation_id}': {e}")
        raise
|
|
770
|
+
|
|
771
|
+
|
|
772
|
+
async def get_restore_status(
    operation_id: str,
    settings: Settings,
) -> dict[str, Any]:
    """Report the progress of a restore operation started with restore_backup.

    Polls the controller for the state of the given restore operation. Because
    the controller typically restarts while a restore is applied, connection
    failures are reported as "in progress" rather than raised to the caller.

    Args:
        operation_id: Restore operation identifier (returned by restore_backup)
        settings: Application settings

    Returns:
        Restore status including progress, pre-restore backup info, and
        rollback availability. When the status API is unavailable or the
        controller cannot be reached, a placeholder "in_progress" payload is
        returned instead.

    Note:
        Restore operations typically take 2-5 minutes and restart the
        controller; expect temporary connection loss while polling.
    """
    logger = get_logger(__name__, settings.log_level)

    # result key -> (key in the controller payload, default when missing)
    field_map = (
        ("backup_id", "backup_id", ""),
        ("status", "status", "completed"),
        ("progress_percent", "progress", 100),
        ("current_step", "step", "Completed"),
        ("started_at", "started_at", ""),
        ("completed_at", "completed_at", ""),
        ("pre_restore_backup_id", "pre_restore_backup_id", None),
        ("can_rollback", "can_rollback", False),
        ("error_message", "error", None),
        ("rollback_reason", "rollback_reason", None),
    )

    try:
        async with UniFiClient(settings) as client:
            await client.authenticate()

            # Ask the controller for the current state of this restore.
            status_data = await client.get_restore_status(operation_id=operation_id)

            result: dict[str, Any] = {"operation_id": operation_id}
            for result_key, payload_key, default in field_map:
                result[result_key] = status_data.get(payload_key, default)

            logger.info(
                f"Retrieved status for restore operation '{operation_id}': "
                f"{result['status']} ({result['progress_percent']}%)"
            )
            return result

    except AttributeError:
        # The API client has no get_restore_status on this controller version.
        logger.warning(
            f"Restore status API not available. Operation '{operation_id}' status unknown."
        )
        return {
            "operation_id": operation_id,
            "status": "in_progress",
            "progress_percent": 0,
            "message": "Restore status tracking not available. Controller may restart during restore.",
            "warning": "Monitor controller connectivity to determine restore completion.",
        }
    except Exception as e:
        # Connection loss is expected mid-restore; surface it as in-progress.
        logger.error(f"Failed to get restore status for '{operation_id}': {e}")
        return {
            "operation_id": operation_id,
            "status": "in_progress",
            "message": "Controller temporarily unavailable (expected during restore).",
            "warning": str(e),
        }
|
|
870
|
+
|
|
871
|
+
|
|
872
|
+
async def schedule_backups(
    site_id: str,
    backup_type: str,
    frequency: str,
    time_of_day: str,
    settings: Settings,
    enabled: bool = True,
    retention_days: int = 30,
    max_backups: int = 10,
    day_of_week: int | None = None,
    day_of_month: int | None = None,
    cloud_backup_enabled: bool = False,
    confirm: bool = False,
    dry_run: bool = False,
) -> dict[str, Any]:
    """Configure automated backup schedule for a site.

    Set up recurring backups to run automatically at specified intervals.
    Helps ensure regular backups without manual intervention.

    Args:
        site_id: Site identifier
        backup_type: Type of backup ("network" or "system", case-insensitive)
        frequency: Backup frequency ("daily", "weekly", or "monthly", case-insensitive)
        time_of_day: Time to run backup (HH:MM format, 24-hour)
        settings: Application settings
        enabled: Whether schedule is enabled (default: True)
        retention_days: Days to retain backups (default: 30, max: 365)
        max_backups: Maximum number of backups to keep (default: 10, max: 100)
        day_of_week: For weekly: 0=Monday, 6=Sunday (required if frequency="weekly")
        day_of_month: For monthly: 1-31 (required if frequency="monthly")
        cloud_backup_enabled: Whether to sync backups to cloud
        confirm: Confirmation flag (must be True to execute, including dry runs)
        dry_run: If True, validate but don't configure

    Returns:
        Backup schedule configuration details

    Raises:
        ValidationError: If parameters are invalid, or if the controller does
            not support backup scheduling

    Note:
        - Daily backups are recommended for production environments
        - Retention and max_backups work together (oldest backups deleted first)
        - Cloud backup requires UniFi account and cloud access enabled
    """
    import re

    site_id = validate_site_id(site_id)
    validate_confirmation(confirm, "backup schedule configuration")
    logger = get_logger(__name__, settings.log_level)

    # Validate backup type (case-insensitive)
    valid_types = ["network", "system"]
    if backup_type.lower() not in valid_types:
        raise ValidationError(f"Invalid backup_type '{backup_type}'. Must be one of: {valid_types}")

    # Validate frequency (case-insensitive)
    valid_frequencies = ["daily", "weekly", "monthly"]
    if frequency.lower() not in valid_frequencies:
        raise ValidationError(
            f"Invalid frequency '{frequency}'. Must be one of: {valid_frequencies}"
        )

    # BUGFIX: normalize case once, so the frequency-specific checks below and
    # the API payload agree with the case-insensitive validation above.
    # Previously an input like "Weekly" passed validation but silently skipped
    # the required day_of_week check and was sent to the API as-is.
    backup_type = backup_type.lower()
    frequency = frequency.lower()

    # Validate time format (HH:MM, 24-hour)
    if not re.match(r"^([01]\d|2[0-3]):([0-5]\d)$", time_of_day):
        raise ValidationError(
            f"Invalid time_of_day '{time_of_day}'. Must be HH:MM format (24-hour)"
        )

    # Validate frequency-specific parameters
    if frequency == "weekly" and day_of_week is None:
        raise ValidationError("day_of_week required for weekly frequency (0=Monday, 6=Sunday)")
    if frequency == "monthly" and day_of_month is None:
        raise ValidationError("day_of_month required for monthly frequency (1-31)")

    # Validate day_of_week range
    if day_of_week is not None and not (0 <= day_of_week <= 6):
        raise ValidationError("day_of_week must be 0-6 (0=Monday, 6=Sunday)")

    # Validate day_of_month range
    if day_of_month is not None and not (1 <= day_of_month <= 31):
        raise ValidationError("day_of_month must be 1-31")

    # Validate retention policy bounds
    if not (1 <= retention_days <= 365):
        raise ValidationError("retention_days must be 1-365")
    if not (1 <= max_backups <= 100):
        raise ValidationError("max_backups must be 1-100")

    # Captured once for both audit logging and the dry-run echo.
    parameters = {
        "site_id": site_id,
        "backup_type": backup_type,
        "frequency": frequency,
        "time_of_day": time_of_day,
        "enabled": enabled,
        "retention_days": retention_days,
        "max_backups": max_backups,
        "day_of_week": day_of_week,
        "day_of_month": day_of_month,
        "cloud_backup_enabled": cloud_backup_enabled,
    }

    if dry_run:
        logger.info(
            f"DRY RUN: Would configure {frequency} {backup_type} backups at {time_of_day} for site '{site_id}'"
        )
        log_audit(
            operation="schedule_backups",
            parameters=parameters,
            result="dry_run",
            site_id=site_id,
            dry_run=True,
        )
        return {
            "dry_run": True,
            "would_configure": parameters,
            "next_run": "Calculated after configuration",
        }

    try:
        async with UniFiClient(settings) as client:
            await client.authenticate()

            # Configure backup schedule via API
            schedule_response = await client.configure_backup_schedule(
                site_id=site_id,
                backup_type=backup_type,
                frequency=frequency,
                time_of_day=time_of_day,
                enabled=enabled,
                retention_days=retention_days,
                max_backups=max_backups,
                day_of_week=day_of_week,
                day_of_month=day_of_month,
                cloud_backup_enabled=cloud_backup_enabled,
            )

            # Use the API-assigned schedule ID; fall back to a synthetic one.
            schedule_id = schedule_response.get(
                "schedule_id", f"schedule_{frequency}_{backup_type}_{site_id}"
            )

            result = {
                "schedule_id": schedule_id,
                "site_id": site_id,
                "enabled": enabled,
                "backup_type": backup_type,
                "frequency": frequency,
                "time_of_day": time_of_day,
                "day_of_week": day_of_week,
                "day_of_month": day_of_month,
                "retention_days": retention_days,
                "max_backups": max_backups,
                "cloud_backup_enabled": cloud_backup_enabled,
                "configured_at": datetime.now().isoformat(),
                "next_run": schedule_response.get("next_run", None),
            }

            logger.info(
                f"Configured {frequency} {backup_type} backup schedule for site '{site_id}' at {time_of_day}"
            )
            log_audit(
                operation="schedule_backups",
                parameters=parameters,
                result="success",
                site_id=site_id,
            )

            return result

    except AttributeError:
        # Fallback if the API client doesn't support scheduling
        logger.warning("Backup scheduling API not available on this controller version.")
        raise ValidationError(
            "Backup scheduling not supported by this controller. "
            "Consider using external cron jobs to call trigger_backup."
        ) from None
    except Exception as e:
        logger.error(f"Failed to configure backup schedule for site '{site_id}': {e}")
        log_audit(
            operation="schedule_backups",
            parameters=parameters,
            result="error",
            error=str(e),
            site_id=site_id,
        )
        raise
|
|
1090
|
+
|
|
1091
|
+
|
|
1092
|
+
async def get_backup_schedule(
    site_id: str,
    settings: Settings,
) -> dict[str, Any]:
    """Fetch the automated backup schedule configured for a site.

    Args:
        site_id: Site identifier
        settings: Application settings

    Returns:
        Schedule details (with "configured": True) including frequency,
        retention policy, and next run time; or a "configured": False payload
        when no schedule exists or the controller does not support scheduling.

    Raises:
        Exception: Propagates unexpected API errors (other than a missing
            scheduling API, which is handled as an unsupported-feature result).
    """
    site_id = validate_site_id(site_id)
    logger = get_logger(__name__, settings.log_level)

    # Defaults applied for any field the controller omits from its payload.
    schedule_defaults: dict[str, Any] = {
        "schedule_id": "",
        "enabled": False,
        "backup_type": "",
        "frequency": "",
        "time_of_day": "",
        "day_of_week": None,
        "day_of_month": None,
        "retention_days": 30,
        "max_backups": 10,
        "cloud_backup_enabled": False,
        "last_run": None,
        "last_backup_id": None,
        "next_run": None,
    }

    try:
        async with UniFiClient(settings) as client:
            await client.authenticate()

            # Pull the schedule configuration from the controller.
            schedule_data = await client.get_backup_schedule(site_id=site_id)

            if not schedule_data:
                logger.info(f"No backup schedule configured for site '{site_id}'")
                return {
                    "configured": False,
                    "message": "No automated backup schedule configured for this site",
                }

            result: dict[str, Any] = {"configured": True}
            for field, fallback in schedule_defaults.items():
                result[field] = schedule_data.get(field, fallback)

            logger.info(
                f"Retrieved backup schedule for site '{site_id}': "
                f"{result['frequency']} {result['backup_type']} at {result['time_of_day']}"
            )
            return result

    except AttributeError:
        # The API client has no get_backup_schedule on this controller version.
        logger.warning("Backup scheduling API not available on this controller version.")
        return {
            "configured": False,
            "message": "Backup scheduling not supported by this controller version",
            "recommendation": "Use external cron jobs to schedule trigger_backup calls",
        }
    except Exception as e:
        logger.error(f"Failed to get backup schedule for site '{site_id}': {e}")
        raise
|