uiprotect 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of uiprotect might be problematic. Click here for more details.
- uiprotect/__init__.py +13 -0
- uiprotect/__main__.py +24 -0
- uiprotect/api.py +1936 -0
- uiprotect/cli/__init__.py +314 -0
- uiprotect/cli/backup.py +1103 -0
- uiprotect/cli/base.py +238 -0
- uiprotect/cli/cameras.py +574 -0
- uiprotect/cli/chimes.py +180 -0
- uiprotect/cli/doorlocks.py +125 -0
- uiprotect/cli/events.py +258 -0
- uiprotect/cli/lights.py +119 -0
- uiprotect/cli/liveviews.py +65 -0
- uiprotect/cli/nvr.py +154 -0
- uiprotect/cli/sensors.py +278 -0
- uiprotect/cli/viewers.py +76 -0
- uiprotect/data/__init__.py +157 -0
- uiprotect/data/base.py +1116 -0
- uiprotect/data/bootstrap.py +634 -0
- uiprotect/data/convert.py +77 -0
- uiprotect/data/devices.py +3384 -0
- uiprotect/data/nvr.py +1520 -0
- uiprotect/data/types.py +630 -0
- uiprotect/data/user.py +236 -0
- uiprotect/data/websocket.py +236 -0
- uiprotect/exceptions.py +41 -0
- uiprotect/py.typed +0 -0
- uiprotect/release_cache.json +1 -0
- uiprotect/stream.py +166 -0
- uiprotect/test_util/__init__.py +531 -0
- uiprotect/test_util/anonymize.py +257 -0
- uiprotect/utils.py +610 -0
- uiprotect/websocket.py +225 -0
- uiprotect-0.1.0.dist-info/LICENSE +23 -0
- uiprotect-0.1.0.dist-info/METADATA +245 -0
- uiprotect-0.1.0.dist-info/RECORD +37 -0
- uiprotect-0.1.0.dist-info/WHEEL +4 -0
- uiprotect-0.1.0.dist-info/entry_points.txt +3 -0
uiprotect/cli/backup.py
ADDED
|
@@ -0,0 +1,1103 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import logging
|
|
5
|
+
import math
|
|
6
|
+
import os
|
|
7
|
+
import sys
|
|
8
|
+
import time
|
|
9
|
+
from dataclasses import dataclass
|
|
10
|
+
from datetime import datetime, timedelta, timezone
|
|
11
|
+
from enum import Enum
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import TYPE_CHECKING, Any, Optional, cast
|
|
14
|
+
|
|
15
|
+
import aiofiles
|
|
16
|
+
import aiofiles.os as aos
|
|
17
|
+
import av
|
|
18
|
+
import dateparser
|
|
19
|
+
import typer
|
|
20
|
+
from PIL import Image
|
|
21
|
+
from rich.progress import (
|
|
22
|
+
BarColumn,
|
|
23
|
+
MofNCompleteColumn,
|
|
24
|
+
Progress,
|
|
25
|
+
TaskProgressColumn,
|
|
26
|
+
TextColumn,
|
|
27
|
+
TimeRemainingColumn,
|
|
28
|
+
track,
|
|
29
|
+
)
|
|
30
|
+
from sqlalchemy import Column, DateTime, ForeignKey, Integer, String, func, or_, select
|
|
31
|
+
from sqlalchemy import event as saevent
|
|
32
|
+
from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, create_async_engine
|
|
33
|
+
from sqlalchemy.orm import Mapped, declarative_base, relationship
|
|
34
|
+
|
|
35
|
+
from uiprotect import data as d
|
|
36
|
+
from uiprotect.api import ProtectApiClient
|
|
37
|
+
from uiprotect.cli import base
|
|
38
|
+
from uiprotect.utils import (
|
|
39
|
+
format_duration,
|
|
40
|
+
get_local_timezone,
|
|
41
|
+
local_datetime,
|
|
42
|
+
utc_now,
|
|
43
|
+
)
|
|
44
|
+
|
|
45
|
+
if TYPE_CHECKING:
|
|
46
|
+
from click.core import Parameter
|
|
47
|
+
|
|
48
|
+
# Typer sub-application registered by the parent CLI as the ``backup`` command group.
app = typer.Typer(rich_markup_mode="rich")
# Declarative base for the local SQLite schema (Event / EventSmartType below)
# used to track which events have already been backed up.
Base = declarative_base()

_LOGGER = logging.getLogger(__name__)
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def _on_db_connect(dbapi_con, connection_record) -> None: # type: ignore[no-untyped-def]
|
|
55
|
+
cursor = dbapi_con.cursor()
|
|
56
|
+
cursor.execute("PRAGMA journal_mode=WAL")
|
|
57
|
+
cursor.execute("PRAGMA synchronous=NORMAL")
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
@dataclass
class BackupContext(base.CliContext):
    """Shared state for all ``backup`` subcommands.

    Built once in :func:`main` and stashed on the Typer context. Holds the
    time window, output location, filename/title format strings, and a
    lazily created async SQLAlchemy engine/session for the local events DB.
    """

    # Inclusive start of the backup window (timezone-aware).
    start: datetime
    # Optional end of the window; ``None`` means "up to now".
    end: datetime | None
    # Base directory all generated files are placed under.
    output: Path
    # Separator character used when expanding the format strings.
    # NOTE: "seperator" spelling is part of the existing interface.
    seperator: str
    thumbnail_format: str
    gif_format: str
    event_format: str
    title_format: str
    # Max number of concurrent downloads from the NVR.
    max_download: int
    # Page size for local DB queries.
    page_size: int
    # Events longer than this are treated as abnormal.
    length_cutoff: timedelta
    # Lazily-initialized async engine/session (see ``db_engine``).
    _db_engine: AsyncEngine | None = None
    _db_session: AsyncSession | None = None

    @property
    def download_thumbnails(self) -> bool:
        # An empty format string disables thumbnail downloads entirely.
        return self.thumbnail_format != ""

    @property
    def download_gifs(self) -> bool:
        # An empty format string disables gif downloads entirely.
        return self.gif_format != ""

    @property
    def download_videos(self) -> bool:
        # An empty format string disables video downloads entirely.
        return self.event_format != ""

    @property
    def db_file(self) -> Path:
        # SQLite database tracking already-backed-up events.
        return self.output / "events.db"

    @property
    def db_engine(self) -> AsyncEngine:
        """Lazily create the aiosqlite engine and register the pragma hook."""
        if self._db_engine is None:
            self._db_engine = create_async_engine(f"sqlite+aiosqlite:///{self.db_file}")
            self._db_session = None
            # Apply WAL/synchronous pragmas on every new connection.
            saevent.listens_for(self._db_engine.sync_engine, "connect")(_on_db_connect)

        return self._db_engine

    def create_db_session(self) -> AsyncSession:
        """Return a fresh session; ``expire_on_commit=False`` keeps ORM
        objects usable after the session commits/closes."""
        return AsyncSession(bind=self.db_engine, expire_on_commit=False)

    async def create_db(self) -> None:
        """Create the local schema (idempotent ``CREATE TABLE IF NOT EXISTS``)."""
        async with self.db_engine.begin() as conn:
            await conn.run_sync(Base.metadata.create_all)
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
class EventTypeChoice(str, Enum):
    """CLI-facing subset of ``uiprotect.data.EventType`` values that can be
    exported by the backup command (used for the ``--event-type`` option)."""

    MOTION = d.EventType.MOTION.value
    RING = d.EventType.RING.value
    SMART_DETECT = d.EventType.SMART_DETECT.value
    SMART_DETECT_LINE = d.EventType.SMART_DETECT_LINE.value
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
class EventSmartType(Base):  # type: ignore[valid-type,misc]
    """One smart-detection label attached to an event (many-to-one with
    :class:`Event`); e.g. "person" or "vehicle"."""

    __tablename__ = "event_smart_type"

    id = Column(Integer, primary_key=True)
    # FK to the owning event's 24-char Protect event ID.
    event_id = Column(String(24), ForeignKey("event.id"), nullable=False)
    smart_type = Column(String(32), index=True)
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
class Event(Base):  # type: ignore[valid-type,misc]
    """Local DB mirror of a Protect event plus path/format helpers.

    Timestamps are stored naive (SQLite) but are always UTC; the ``start``/
    ``end`` properties re-attach ``timezone.utc``. The ``get_*_path`` helpers
    expand the user-supplied format strings into concrete output paths, and
    the ``get_existing_*`` variants glob for a previously written file whose
    camera portion may differ (e.g. after a camera rename).
    """

    __tablename__ = "event"
    __allow_unmapped__ = True

    # Protect event ID (24-char string) — primary key.
    id = Column(String(24), primary_key=True)
    # Naive UTC timestamps as stored in SQLite.
    start_naive = Column(DateTime())
    end_naive = Column(DateTime(), nullable=True)
    camera_mac = Column(String(12), index=True)
    event_type = Column(String(32), index=True)

    # Eagerly (joined) loaded smart-detection labels for this event.
    smart_detect_types: Mapped[list[EventSmartType]] = relationship(
        "EventSmartType",
        lazy="joined",
        uselist=True,
    )

    # Per-instance memoization caches for the properties/helpers below.
    _start: datetime | None = None
    _end: datetime | None = None
    _smart_types: set[str] | None = None
    _context: dict[str, str] | None = None
    _glob_context: dict[str, str] | None = None

    @property
    def start(self) -> datetime:
        """Timezone-aware (UTC) start time."""
        if self._start is None:
            self._start = self.start_naive.replace(tzinfo=timezone.utc)  # type: ignore[union-attr]
        return self._start

    @property
    def end(self) -> datetime | None:
        """Timezone-aware (UTC) end time, or ``None`` while ongoing."""
        if self._end is None and self.end_naive is not None:
            self._end = self.end_naive.replace(tzinfo=timezone.utc)
        return self._end

    @property
    def smart_types(self) -> set[str]:
        """Set of smart-detection label strings for this event (memoized)."""
        if self._smart_types is None:
            self._smart_types = {s.smart_type for s in self.smart_detect_types}  # type: ignore[misc]
        return self._smart_types

    def get_file_context(self, ctx: BackupContext) -> dict[str, str]:
        """Build (and memoize) the substitution dict used by all of the
        user-configurable ``--*-format`` strings.

        Includes both UTC and local-time variants of each date/time key,
        camera name/slug, a pretty event-type label, and a ``title`` key
        rendered from ``ctx.title_format`` using the other keys.
        """
        if self._context is None:
            camera = ctx.protect.bootstrap.get_device_from_mac(self.camera_mac)  # type: ignore[arg-type]
            camera_slug = ""
            display_name = ""
            length = timedelta(seconds=0)
            if camera is not None:
                # e.g. "Front Door" -> "front-door-" (trailing separator included).
                camera_slug = (
                    camera.display_name.lower().replace(" ", ctx.seperator)
                    + ctx.seperator
                )
                display_name = camera.display_name
            if self.end is not None:
                length = self.end - self.start

            event_type = str(self.event_type)
            event_type_pretty = f"{event_type.title()} Event"
            if event_type in {
                d.EventType.SMART_DETECT.value,
                d.EventType.SMART_DETECT_LINE.value,
            }:
                # Smart detections embed their sorted labels in the type,
                # e.g. "smartDetectZone[person,vehicle]".
                smart_types = list(self.smart_types)
                smart_types.sort()
                event_type = f"{event_type}[{','.join(smart_types)}]"
                smart_types_title = [s.title() for s in smart_types]
                event_type_pretty = f"Smart Detection ({', '.join(smart_types_title)})"

            start_local = local_datetime(self.start)
            self._context = {
                "year": str(self.start.year),
                "month": str(self.start.month),
                "day": str(self.start.day),
                "hour": str(self.start.hour),
                "minute": str(self.start.minute),
                # "-" separators are swapped for the configured separator so
                # the whole filename uses one consistent delimiter.
                "datetime": self.start.strftime("%Y-%m-%dT%H-%M-%S%z").replace(
                    "-",
                    ctx.seperator,
                ),
                "date": self.start.strftime("%Y-%m-%d").replace("-", ctx.seperator),
                "time": self.start.strftime("%H-%M-%S%z").replace("-", ctx.seperator),
                "time_sort_pretty": self.start.strftime("%H:%M:%S (%Z)"),
                "time_pretty": self.start.strftime("%I:%M:%S %p (%Z)"),
                "year_local": str(start_local.year),
                "month_local": str(start_local.month),
                "day_local": str(start_local.day),
                "hour_local": str(start_local.hour),
                "minute_local": str(start_local.minute),
                "datetime_local": start_local.strftime("%Y-%m-%dT%H-%M-%S%z").replace(
                    "-",
                    ctx.seperator,
                ),
                "date_local": start_local.strftime("%Y-%m-%d").replace(
                    "-",
                    ctx.seperator,
                ),
                "time_local": start_local.strftime("%H-%M-%S%z").replace(
                    "-",
                    ctx.seperator,
                ),
                "time_sort_pretty_local": start_local.strftime("%H:%M:%S (%Z)"),
                "time_pretty_local": start_local.strftime("%I:%M:%S %p (%Z)"),
                "mac": str(self.camera_mac),
                "camera_name": display_name,
                "camera_slug": camera_slug,
                "event_type": event_type,
                "event_type_pretty": event_type_pretty,
                "length_pretty": format_duration(length),
                "sep": ctx.seperator,
            }

            # "title" is rendered last so it can reference any other key.
            self._context["title"] = ctx.title_format.format(**self._context)
        return self._context

    def get_glob_file_context(self, ctx: BackupContext) -> dict[str, str]:
        """Variant of :meth:`get_file_context` with the camera fields
        wildcarded, used to locate files written under an old camera name."""
        if self._glob_context is None:
            self._glob_context = self.get_file_context(ctx).copy()
            self._glob_context["camera_slug"] = "*"
            self._glob_context["camera_name"] = "*"
        return self._glob_context

    def get_thumbnail_path(self, ctx: BackupContext) -> Path:
        """Target path for this event's still thumbnail."""
        context = self.get_file_context(ctx)
        file_path = ctx.thumbnail_format.format(**context)
        return ctx.output / file_path

    def get_existing_thumbnail_path(self, ctx: BackupContext) -> Path | None:
        """First on-disk thumbnail matching the wildcarded pattern, if any."""
        context = self.get_glob_file_context(ctx)
        file_path = ctx.thumbnail_format.format(**context)

        paths = list(ctx.output.glob(file_path))
        if paths:
            return paths[0]
        return None

    def get_gif_path(self, ctx: BackupContext) -> Path:
        """Target path for this event's animated-thumbnail gif."""
        context = self.get_file_context(ctx)
        file_path = ctx.gif_format.format(**context)
        return ctx.output / file_path

    def get_existing_gif_path(self, ctx: BackupContext) -> Path | None:
        """First on-disk gif matching the wildcarded pattern, if any."""
        context = self.get_glob_file_context(ctx)
        file_path = ctx.gif_format.format(**context)

        paths = list(ctx.output.glob(file_path))
        if paths:
            return paths[0]
        return None

    def get_event_path(self, ctx: BackupContext) -> Path:
        """Target path for this event's exported video clip."""
        context = self.get_file_context(ctx)
        file_path = ctx.event_format.format(**context)
        return ctx.output / file_path

    def get_existing_event_path(self, ctx: BackupContext) -> Path | None:
        """First on-disk clip matching the wildcarded pattern, if any."""
        context = self.get_glob_file_context(ctx)
        file_path = ctx.event_format.format(**context)

        paths = list(ctx.output.glob(file_path))
        if paths:
            return paths[0]
        return None
|
|
285
|
+
|
|
286
|
+
|
|
287
|
+
@dataclass
class QueuedDownload:
    """Work item for the download queue consumed by ``_download_watcher``.

    ``task`` is the in-flight download coroutine task (``None`` marks an item
    that needs no download); ``args`` holds the original ``_download_event``
    arguments so the watcher can recreate the task on retry.
    """

    task: asyncio.Task[bool] | None
    args: list[Any]
|
|
291
|
+
|
|
292
|
+
|
|
293
|
+
def relative_datetime(ctx: typer.Context, value: str, param: Parameter) -> datetime:
    """Parse *value* with ``dateparser`` (ISO 8601 or human-readable relative
    strings like "2 hours ago") or raise ``typer.BadParameter``."""
    parsed = dateparser.parse(value)
    if parsed is None:
        raise typer.BadParameter(
            "Must be a ISO 8601 format or human readable relative format",
            ctx,
            param,
        )
    return parsed
|
|
302
|
+
|
|
303
|
+
|
|
304
|
+
# Queue of pending/running event downloads consumed by the download watcher.
_DownloadEventQueue = asyncio.Queue[QueuedDownload]

# Shared Typer option declarations for the backup command group. Kept at
# module level so `main`'s signature stays readable.
OPTION_OUTPUT = typer.Option(
    None,
    help="Base dir for creating files. Defaults to $PWD.",
    envvar="UFP_BACKUP_OUTPUT",
)
OPTION_START = typer.Option(
    None,
    "-s",
    "--start",
    help="Cutoff for start of backup. Defaults to start of recording for NVR.",
    envvar="UFP_BACKUP_START",
)
OPTION_PAGE_SIZE = typer.Option(
    1000,
    "--page-size",
    help="Number of events fetched at once from local database. Increases memory usage.",
)
OPTION_LENGTH_CUTOFF = typer.Option(
    timedelta(hours=1).total_seconds(),
    "--length-cutoff",
    help="Event size cutoff for detecting abnormal events (in seconds).",
)
OPTION_END = typer.Option(
    None,
    "-e",
    "--end",
    help="Cutoff for end of backup. Defaults to now.",
    envvar="UFP_BACKUP_END",
)
OPTION_EVENT_TYPES = typer.Option(
    list(EventTypeChoice),
    "-t",
    "--event-type",
    help="Events to export. Can be used multiple time.",
)
OPTION_SMART_TYPES = typer.Option(
    list(d.SmartDetectObjectType),
    "-m",
    "--smart-type",
    help="Smart Detection types to export. Can be used multiple time.",
)
# NOTE: "SPERATOR" spelling is kept — other code references this name.
OPTION_SPERATOR = typer.Option("-", "--sep", help="Separator used for formatting.")
OPTION_THUMBNAIL_FORMAT = typer.Option(
    "{year}/{month}/{day}/{hour}/{datetime}{sep}{mac}{sep}{camera_slug}{event_type}{sep}thumb.jpg",
    "--thumb-format",
    help='Filename format to save event thumbnails to. Set to empty string ("") to skip saving event thumbnails.',
)
OPTION_GIF_FORMAT = typer.Option(
    "{year}/{month}/{day}/{hour}/{datetime}{sep}{mac}{sep}{camera_slug}{event_type}{sep}animated.gif",
    "--gif-format",
    help='Filename format to save event gifs to. Set to empty string ("") to skip saving event gif.',
)
OPTION_EVENT_FORMAT = typer.Option(
    "{year}/{month}/{day}/{hour}/{datetime}{sep}{mac}{sep}{camera_slug}{event_type}.mp4",
    "--event-format",
    # fixed copy-paste from the gif option: this option controls event videos
    help='Filename format to save event videos to. Set to empty string ("") to skip saving event videos.',
)
OPTION_TITLE_FORMAT = typer.Option(
    "{time_sort_pretty_local} {sep} {camera_name} {sep} {event_type_pretty} {sep} {length_pretty}",
    "--title-format",
    help="Format to use to tag title for video metadata.",
)
OPTION_VERBOSE = typer.Option(False, "-v", "--verbose", help="Debug logging.")
OPTION_MAX_DOWNLOAD = typer.Option(
    5,
    "-d",
    "--max-download",
    help="Max number of concurrent downloads. Adds additional loads to NVR.",
)
|
|
375
|
+
|
|
376
|
+
|
|
377
|
+
def _setup_logger(verbose: bool) -> None:
|
|
378
|
+
console_handler = logging.StreamHandler()
|
|
379
|
+
log_format = "[%(asctime)s] %(levelname)s - %(message)s"
|
|
380
|
+
if verbose:
|
|
381
|
+
console_handler.setLevel(logging.DEBUG)
|
|
382
|
+
elif sys.stdout.isatty():
|
|
383
|
+
console_handler.setLevel(logging.WARNING)
|
|
384
|
+
log_format = "%(message)s"
|
|
385
|
+
else:
|
|
386
|
+
console_handler.setLevel(logging.INFO)
|
|
387
|
+
|
|
388
|
+
formatter = logging.Formatter(log_format)
|
|
389
|
+
console_handler.setFormatter(formatter)
|
|
390
|
+
|
|
391
|
+
root_logger = logging.getLogger("uiprotect")
|
|
392
|
+
root_logger.setLevel(logging.DEBUG)
|
|
393
|
+
root_logger.addHandler(console_handler)
|
|
394
|
+
|
|
395
|
+
|
|
396
|
+
@app.callback()
def main(
    ctx: typer.Context,
    start: Optional[str] = OPTION_START,
    end: Optional[str] = OPTION_END,
    output_folder: Optional[Path] = OPTION_OUTPUT,
    thumbnail_format: str = OPTION_THUMBNAIL_FORMAT,
    gif_format: str = OPTION_GIF_FORMAT,
    event_format: str = OPTION_EVENT_FORMAT,
    title_format: str = OPTION_TITLE_FORMAT,
    verbose: bool = OPTION_VERBOSE,
    max_download: int = OPTION_MAX_DOWNLOAD,
    page_size: int = OPTION_PAGE_SIZE,
    length_cutoff: int = OPTION_LENGTH_CUTOFF,
    seperator: str = OPTION_SPERATOR,
) -> None:
    """
    Backup CLI.

    The backup CLI is still very much a work in progress; it should be
    considered experimental and potentially unstable (the interface may
    change in the future).
    """
    _setup_logger(verbose)

    protect: ProtectApiClient = ctx.obj.protect
    local_tz = get_local_timezone()

    if start is None:
        # Default to the NVR's earliest recorded footage.
        start_dt = protect.bootstrap.recording_start
    else:
        # NOTE(review): ctx.command.params[0]/[1] assumes the declared option
        # order matches (start, end) — fragile if options are reordered.
        start_dt = relative_datetime(ctx, start, ctx.command.params[0])
        # NOTE(review): replace() stamps the local zone even if the parsed
        # value was already timezone-aware — TODO confirm intended.
        start_dt = start_dt.replace(tzinfo=local_tz)
    if start_dt is None:
        start_dt = utc_now()

    end_dt = None
    if end is not None:
        end_dt = relative_datetime(ctx, end, ctx.command.params[1])
        end_dt = end_dt.replace(tzinfo=local_tz)

    if output_folder is None:
        output_folder = Path(os.getcwd())

    # Replace the generic CLI context with the backup-specific one so
    # subcommands receive a fully-populated BackupContext.
    context = BackupContext(
        protect=ctx.obj.protect,
        start=start_dt,
        end=end_dt,
        output_format=ctx.obj.output_format,
        output=output_folder,
        thumbnail_format=thumbnail_format,
        gif_format=gif_format,
        event_format=event_format,
        title_format=title_format,
        max_download=max_download,
        page_size=page_size,
        length_cutoff=timedelta(seconds=length_cutoff),
        seperator=seperator,
    )
    ctx.obj = context
|
|
454
|
+
|
|
455
|
+
|
|
456
|
+
def _wipe_files(ctx: BackupContext, no_input: bool) -> None:
    """Delete the local events DB and every saved thumbnail/clip under the
    output directory, asking for confirmation unless ``no_input`` is set."""
    if not no_input:
        confirmed = typer.confirm(
            "Are you sure you want to delete all existing thumbnails and video clips?",
        )
        if not confirmed:
            raise typer.Exit(1)

    if ctx.db_file.exists():
        os.remove(ctx.db_file)

    # Sweep both file types with a progress bar per category.
    for pattern, label in (
        ("**/*.jpg", "Deleting Thumbnails"),
        ("**/*.mp4", "Deleting Clips"),
    ):
        for file_path in track(ctx.output.glob(pattern), description=label):
            os.remove(file_path)
|
|
470
|
+
|
|
471
|
+
|
|
472
|
+
async def _newest_event(ctx: BackupContext) -> Event | None:
    """Return the most recently started event in the local DB, if any."""
    async with ctx.create_db_session() as session:
        query = select(Event).order_by(Event.start_naive.desc())
        rows = await session.execute(query)
        return rows.scalars().first()
|
|
477
|
+
|
|
478
|
+
|
|
479
|
+
async def _prune_events(ctx: BackupContext) -> int:
    """Delete DB rows and on-disk files for events older than ``ctx.start``.

    Returns the number of events deleted.

    NOTE(review): the inner ``.join(EventSmartType)`` limits the query to
    events that HAVE smart-detect rows, so old motion/ring events are never
    pruned — verify whether an outer join (or no join) was intended.
    """
    _LOGGER.debug("Pruning events before %s", ctx.start)

    deleted = 0
    db = ctx.create_db_session()
    async with db:
        result = await db.execute(
            select(Event).join(EventSmartType).where(Event.start_naive < ctx.start),
        )
        for event in track(result.unique().scalars(), description="Pruning Events"):
            # Remove the event's saved files first, then its DB rows.
            thumb_path = event.get_thumbnail_path(ctx)
            if thumb_path.exists():
                _LOGGER.debug("Delete file %s", thumb_path)
                await aos.remove(thumb_path)

            event_path = event.get_event_path(ctx)
            if event_path.exists():
                _LOGGER.debug("Delete file %s", event_path)
                await aos.remove(event_path)

            if event.event_type in {
                d.EventType.SMART_DETECT.value,
                d.EventType.SMART_DETECT_LINE.value,
            }:
                # Child rows must go before the parent event row.
                for smart_type in event.smart_detect_types:
                    await db.delete(smart_type)
            await db.delete(event)
            deleted += 1
        await db.commit()

    return deleted
|
|
510
|
+
|
|
511
|
+
|
|
512
|
+
async def _update_event(ctx: BackupContext, event: d.Event) -> None:
    """Upsert a Protect event (and diff its smart-detect labels) into the
    local DB. Events with no associated camera are ignored.
    """
    if event.camera is None:
        return

    db = ctx.create_db_session()
    to_delete: list[EventSmartType] = []
    async with db:
        result = await db.execute(select(Event).where(Event.id == event.id))
        db_event = result.scalars().first()
        do_insert = False
        if db_event is None:
            db_event = Event(id=event.id)
            do_insert = True
        # Always refresh mutable fields (end time can change once an
        # in-progress event finishes).
        db_event.start_naive = event.start
        db_event.end_naive = event.end
        db_event.camera_mac = event.camera.mac
        db_event.event_type = event.type.value

        # EventType is a str-based enum, so comparing against .value strings
        # works here.
        if event.type in {
            d.EventType.SMART_DETECT.value,
            d.EventType.SMART_DETECT_LINE.value,
        }:
            types = {e.value for e in event.smart_detect_types}

            # Diff existing label rows against the current set: stale rows
            # are queued for deletion, matches are removed from `types` so
            # only genuinely new labels remain to be inserted.
            result = await db.execute(
                select(EventSmartType).where(EventSmartType.event_id == event.id),
            )
            for event_smart_type in result.unique().scalars():
                event_type = cast(EventSmartType, event_smart_type)
                if event_type.smart_type not in types:
                    to_delete.append(event_type)
                else:
                    types.remove(event_type.smart_type)

            for smart_type_str in types:
                db.add(EventSmartType(event_id=event.id, smart_type=smart_type_str))

        if do_insert:
            db.add(db_event)
        for smart_type in to_delete:
            await db.delete(smart_type)
        await db.commit()
|
|
554
|
+
|
|
555
|
+
|
|
556
|
+
async def _update_ongoing_events(ctx: BackupContext) -> int:
    """Re-fetch every non-ring event stored without an end time and update it
    in the local DB. Returns the number of events refreshed.
    """
    db = ctx.create_db_session()
    async with db:
        result = await db.execute(
            select(Event)
            .where(Event.event_type != "ring")
            # BUGFIX: was `Event.end_naive is None`, which Python evaluates
            # eagerly to the constant False (identity check on a Column), so
            # the filter never matched any rows and ongoing events were never
            # updated. SQL NULL tests must use SQLAlchemy's `.is_()`.
            .where(Event.end_naive.is_(None)),
        )

        events = list(result.unique().scalars())

        if len(events) == 0:
            return 0
        for event in track(events, description="Updating Events"):
            event_id = cast(str, event.id)
            await _update_event(ctx, await ctx.protect.get_event(event_id))
        return len(events)
|
|
573
|
+
|
|
574
|
+
|
|
575
|
+
async def _update_events(ctx: BackupContext) -> int:
    """Sync events from the NVR into the local DB for the configured window.

    First refreshes any still-ongoing events, then pages forward through the
    NVR's event feed (100 at a time) from ``ctx.start`` to ``ctx.end``/now,
    upserting each new event. Returns the total number of events touched.
    """
    # update any events that are still set as ongoing in the database
    updated_ongoing = await _update_ongoing_events(ctx)
    start = ctx.start
    end = ctx.end or utc_now()
    processed: set[str] = set()

    # Progress is measured in seconds of the time window covered so far.
    total = int((end - ctx.start).total_seconds())
    _LOGGER.debug("total: %s: %s %s", total, start, end)

    prev_start = start
    with Progress() as pb:
        task_id = pb.add_task("Fetching New Events", total=total)
        task = pb.tasks[0]
        pb.refresh()
        while not pb.finished:
            # Advance the bar by how far `start` moved in the last page.
            progress = int((start - prev_start).total_seconds())
            pb.update(task_id, advance=progress)
            _LOGGER.debug(
                "progress: +%s: %s/%s: %s %s",
                progress,
                task.completed,
                task.total,
                start,
                end,
            )

            events = await ctx.protect.get_events(
                start,
                end,
                limit=100,
                types=[
                    d.EventType.MOTION,
                    d.EventType.RING,
                    d.EventType.SMART_DETECT,
                    d.EventType.SMART_DETECT_LINE,
                ],
            )

            prev_start = start
            count = 0
            for event in events:
                # Pagination cursor: next request starts at the last event's
                # start time; `processed` de-duplicates the overlap.
                start = event.start
                if event.id not in processed:
                    count += 1
                    processed.add(event.id)
                    await _update_event(ctx, event)

            # No cursor movement and nothing new means we've drained the
            # window — force the bar to completion to exit the loop.
            if start == prev_start and count == 0:
                pb.update(task_id, completed=total)

    return updated_ongoing + len(processed)
|
|
627
|
+
|
|
628
|
+
|
|
629
|
+
async def _download_watcher(
    count: int,
    tasks: _DownloadEventQueue,
    no_error_flag: asyncio.Event,
) -> int:
    """Consume ``count`` queued downloads, retrying failures with exponential
    backoff (up to 5 retries per event).

    ``no_error_flag`` is cleared while a download is failing and set again
    once it succeeds or is given up on, letting the producer throttle.
    Returns the number of successful downloads. A queue item with
    ``task is None`` counts as processed without a download.
    """
    processed = 0
    loop = asyncio.get_running_loop()
    downloaded = 0
    last_print = time.monotonic()
    while processed < count:
        download = await tasks.get()
        task = download.task
        if task is None:
            processed += 1
            continue

        retries = 0
        while True:
            try:
                await task
            except asyncio.CancelledError:
                return downloaded
            except Exception:
                # Failure details are picked up via task.exception() below.
                pass

            event: Event = download.args[1]
            if exception := task.exception():
                no_error_flag.clear()
                if retries < 5:
                    wait = math.pow(2, retries)
                    _LOGGER.warning(
                        # BUGFIX: message typo "Retring" -> "Retrying"
                        "Exception while downloading event (%s): %s. Retrying in %s second(s)",
                        event.id,
                        exception,
                        wait,
                    )
                    await asyncio.sleep(wait)
                    retries += 1
                    # Recreate the download from its original arguments.
                    task = loop.create_task(_download_event(*download.args))
                else:
                    _LOGGER.error("Failed to download event %s", event.id)

            # Either success or retries exhausted: account for this item.
            if exception is None or retries >= 5:
                no_error_flag.set()
                processed += 1
                now = time.monotonic()
                if now - last_print > 60:
                    _LOGGER.info(
                        "Processed %s/%s (%.2f%%) events",
                        processed,
                        count,
                        # BUGFIX: was `processed / count`, printing a fraction
                        # (e.g. "0.50%") under a percent format.
                        processed / count * 100,
                    )
                    last_print = now
                if exception is None and task.result():
                    downloaded += 1
                break
    return downloaded
|
|
687
|
+
|
|
688
|
+
|
|
689
|
+
def _verify_thumbnail(path: Path) -> bool:
    """Return True if *path* is a readable, non-corrupted image.

    Runs Pillow's integrity check; any failure (unreadable file, truncated
    data, etc.) is reported as False so the caller can re-download.
    """
    try:
        # Use the context manager so the file handle is closed promptly
        # instead of waiting for GC (the original leaked it).
        with Image.open(path) as image:
            image.verify()
    # no docs on what exception could be
    except Exception:
        return False
    return True
|
|
697
|
+
|
|
698
|
+
|
|
699
|
+
async def _download_event_thumb(
    ctx: BackupContext,
    event: Event,
    verify: bool,
    force: bool,
    animated: bool = False,
) -> bool:
    """Ensure the event's thumbnail (or animated gif) exists on disk.

    Order of operations: optionally force-delete any existing file, migrate a
    file saved under an old camera name to the current path, optionally verify
    and delete a corrupted file, then download if the target is missing.
    Returns True only when a new file was downloaded this call.
    """
    if animated:
        thumb_type = "gif"
        thumb_path = event.get_gif_path(ctx)
        existing_thumb_path = event.get_existing_gif_path(ctx)
    else:
        thumb_type = "thumbnail"
        thumb_path = event.get_thumbnail_path(ctx)
        existing_thumb_path = event.get_existing_thumbnail_path(ctx)

    if force and existing_thumb_path:
        _LOGGER.debug("Delete file %s", existing_thumb_path)
        await aos.remove(existing_thumb_path)

    # A file exists but under a different (wildcard-matched) name, e.g. the
    # camera was renamed — move it to the current expected path.
    if existing_thumb_path and str(existing_thumb_path) != str(thumb_path):
        _LOGGER.debug(
            "Rename event %s file %s: %s %s %s: %s",
            thumb_type,
            event.id,
            event.start,
            event.end,
            event.event_type,
            thumb_path,
        )
        await aos.makedirs(thumb_path.parent, exist_ok=True)
        await aos.rename(existing_thumb_path, thumb_path)

    # Image verification is CPU/IO-bound; run it off the event loop.
    if (
        verify
        and thumb_path.exists()
        and not await asyncio.get_running_loop().run_in_executor(
            None, _verify_thumbnail, thumb_path
        )
    ):
        _LOGGER.warning(
            "Corrupted event %s file for event (%s), redownloading",
            thumb_type,
            event.id,
        )
        await aos.remove(thumb_path)

    if not thumb_path.exists():
        _LOGGER.debug(
            "Download event %s %s: %s %s: %s",
            thumb_type,
            event.id,
            event.start,
            event.event_type,
            thumb_path,
        )
        event_id = str(event.id)
        if animated:
            thumbnail = await ctx.protect.get_event_animated_thumbnail(event_id)
        else:
            thumbnail = await ctx.protect.get_event_thumbnail(event_id)
        if thumbnail is not None:
            await aos.makedirs(thumb_path.parent, exist_ok=True)
            async with aiofiles.open(thumb_path, mode="wb") as f:
                await f.write(thumbnail)
            return True
    return False
|
|
766
|
+
|
|
767
|
+
|
|
768
|
+
def _verify_video_file(  # type: ignore[return]
    path: Path,
    length: float,
    width: int,
    height: int,
    title: str,
) -> tuple[bool, bool]:
    """Check an exported clip against expected duration/resolution/title.

    Returns ``(valid, metadata_valid)``: ``valid`` when the container's
    duration is at least 80% of the expected ``length`` (seconds) and the
    resolution matches; ``metadata_valid`` additionally requires the embedded
    title to match. Any decoding error yields ``(False, False)``.
    """
    try:
        with av.open(str(path)) as video:
            # Stream duration is in time_base units; convert to seconds.
            slength = float(
                video.streams.video[0].duration * video.streams.video[0].time_base,  # type: ignore[operator]
            )
            valid = (
                (slength / length) > 0.80  # export is fuzzy
                and video.streams.video[0].codec_context.width == width
                and video.streams.video[0].codec_context.height == height
            )
            metadata_valid = False
            if valid:
                metadata_valid = bool(video.metadata["title"] == title)
            return valid, metadata_valid

    # no docs on what exception could be
    except Exception:
        return False, False
|
|
793
|
+
|
|
794
|
+
|
|
795
|
+
def _add_metadata(path: Path, creation: datetime, title: str) -> bool:
    """Remux the clip at *path*, embedding creation time and title metadata.

    Writes to a sibling ``*.metadata.mp4`` file and, on success, atomically
    replaces the original; on failure the temporary file is removed and the
    original is left untouched. Returns ``True`` on success.
    """
    creation = local_datetime(creation)
    output_path = path.parent / path.name.replace(".mp4", ".metadata.mp4")

    success = True
    try:
        with (
            av.open(str(path)) as input_file,
            av.open(str(output_path), "w") as output_file,
        ):
            # Carry over any metadata the source already had before
            # overriding the fields we care about.
            for key, value in input_file.metadata.items():
                output_file.metadata[key] = value
            output_file.metadata["creation_time"] = creation.isoformat()
            output_file.metadata["title"] = title
            output_file.metadata["year"] = creation.date().isoformat()
            output_file.metadata["release"] = creation.date().isoformat()

            # NOTE: keys are actually av Stream objects, not str — the
            # annotation is loose, hence the ignore[index] markers below.
            in_to_out: dict[str, Any] = {}
            for stream in input_file.streams:
                in_to_out[stream] = output_file.add_stream(template=stream)  # type: ignore[index]
                in_to_out[stream].metadata["creation_time"] = creation.isoformat()  # type: ignore[index]

            # Copy packets without re-encoding (stream copy).
            for packet in input_file.demux(list(in_to_out.keys())):
                if packet.dts is None:
                    continue

                packet.stream = in_to_out[packet.stream]  # type: ignore[index]
                try:
                    output_file.mux(packet)  # type: ignore[arg-type]
                # some frames may be corrupted on disk from NVR
                except ValueError:
                    continue
    # no docs on what exception could be
    except Exception:
        success = False
    finally:
        # Swap the remuxed file into place only if everything succeeded;
        # otherwise clean up the partial output.
        if success:
            os.remove(path)
            output_path.rename(path)
        elif output_path.exists():
            os.remove(output_path)
    return success
|
|
837
|
+
|
|
838
|
+
|
|
839
|
+
async def _download_event_video(
    ctx: BackupContext,
    camera: d.Camera,
    event: Event,
    verify: bool,
    force: bool,
) -> bool:
    """Ensure the video clip for *event* exists on disk, downloading if needed.

    Handles renaming a clip found at a stale path, optional on-disk
    verification (redownloading corrupted files), and re-embedding metadata
    after a download or when metadata verification failed. Returns ``True``
    if a new clip was downloaded.
    """
    event_path = event.get_event_path(ctx)
    existing_event_path = event.get_existing_event_path(ctx)
    if force and existing_event_path:
        _LOGGER.debug("Delete file %s", existing_event_path)
        await aos.remove(existing_event_path)

    # NOTE(review): when force is set the file above was just deleted, yet
    # existing_event_path is still truthy — this rename would then hit a
    # missing file. Confirm force + relocated clip is an expected combination.
    if existing_event_path and str(existing_event_path) != str(event_path):
        _LOGGER.debug(
            "Rename event file %s: %s %s %s: %s",
            event.id,
            event.start,
            event.end,
            event.event_type,
            event_path,
        )
        await aos.makedirs(event_path.parent, exist_ok=True)
        await aos.rename(existing_event_path, event_path)

    metadata_valid = True
    if verify and event_path.exists():
        valid = False
        # Verification needs the event length, so in-progress events
        # (end is None) are treated as invalid and left alone.
        if event.end is not None:
            # PyAV decode is blocking; run it off the event loop.
            valid, metadata_valid = await asyncio.get_running_loop().run_in_executor(
                None,
                _verify_video_file,
                event_path,
                (event.end - event.start).total_seconds(),
                camera.channels[0].width,
                camera.channels[0].height,
                event.get_file_context(ctx)["title"],
            )

        if not valid:
            _LOGGER.warning(
                "Corrupted video file for event (%s), redownloading",
                event.id,
            )
            await aos.remove(event_path)

    downloaded = False
    if not event_path.exists() and event.end is not None:
        _LOGGER.debug(
            "Download event %s: %s %s %s: %s",
            event.id,
            event.start,
            event.end,
            event.event_type,
            event_path,
        )
        await aos.makedirs(event_path.parent, exist_ok=True)
        await camera.get_video(event.start, event.end, output_file=event_path)
        downloaded = True

    # Re-embed metadata on any fresh download, or when an existing file's
    # metadata failed verification.
    if (downloaded or not metadata_valid) and event.end is not None:
        file_context = event.get_file_context(ctx)
        if not await asyncio.get_running_loop().run_in_executor(
            None, _add_metadata, event_path, event.start, file_context["title"]
        ):
            _LOGGER.warning("Failed to write metadata for event (%s)", event.id)
    return downloaded
|
|
906
|
+
|
|
907
|
+
|
|
908
|
+
async def _download_event(
    ctx: BackupContext,
    event: Event,
    verify: bool,
    force: bool,
    pb: Progress,
) -> bool:
    """Download all enabled assets (thumbnail, GIF, video) for one event.

    Looks up the event's camera by MAC; if it is no longer adopted, nothing
    is downloaded. Advances the progress bar either way. Returns ``True`` if
    any asset was newly downloaded.
    """
    downloaded = False
    camera = ctx.protect.bootstrap.get_device_from_mac(event.camera_mac)  # type: ignore[arg-type]
    if camera is not None:
        camera = cast(d.Camera, camera)
        downloads = []
        if ctx.download_thumbnails:
            downloads.append(_download_event_thumb(ctx, event, verify, force))
        if ctx.download_gifs:
            downloads.append(
                _download_event_thumb(ctx, event, verify, force, animated=True),
            )
        # BUGFIX: this branch previously repeated `ctx.download_thumbnails`,
        # so video clips were only fetched when thumbnails were enabled.
        if ctx.download_videos:
            downloads.append(_download_event_video(ctx, camera, event, verify, force))

        # Fetch all requested assets concurrently.
        downloaded = any(await asyncio.gather(*downloads))
    pb.update(pb.tasks[0].id, advance=1)
    return downloaded
|
|
932
|
+
|
|
933
|
+
|
|
934
|
+
# TODO
|
|
935
|
+
async def _download_events(
    ctx: BackupContext,
    event_types: list[d.EventType],
    smart_types: list[d.SmartDetectObjectType],
    verify: bool,
    force: bool,
) -> tuple[int, int]:
    """Download assets for every matching event in the local database.

    Pages through events of the requested types in the backup window,
    queueing bounded concurrent downloads that a watcher task drains.
    Returns ``(total matching events, events newly downloaded)``.
    """
    start = ctx.start
    end = ctx.end or utc_now()
    db = ctx.create_db_session()
    async with db:
        count_query = (
            select(func.count(Event.id))
            .where(Event.event_type.in_([e.value for e in event_types]))
            .where(Event.start_naive >= start)
            # BUGFIX: `Event.end_naive is None` is a Python identity test
            # evaluated at query-build time (always False); `.is_(None)`
            # emits SQL "IS NULL" so in-progress events are included.
            .where(or_(Event.end_naive <= end, Event.end_naive.is_(None)))
        )
        count = cast(int, (await db.execute(count_query)).scalar())
        _LOGGER.info("Downloading %s events", count)

        columns = [
            TextColumn("[progress.description]{task.description}"),
            BarColumn(),
            TaskProgressColumn(),
            MofNCompleteColumn(),
            TimeRemainingColumn(),
        ]
        with Progress(*columns) as pb:
            task_id = pb.add_task("Downloading Events", total=count)

            query = (
                select(Event)
                .where(Event.event_type.in_([e.value for e in event_types]))
                .where(Event.start_naive >= start)
                # Same IS NULL fix as count_query above.
                .where(or_(Event.end_naive <= end, Event.end_naive.is_(None)))
                .limit(ctx.page_size)
            )
            smart_types_set = {s.value for s in smart_types}
            loop = asyncio.get_running_loop()
            # NOTE(review): with max_download == 1 the maxsize becomes 0,
            # which asyncio.Queue treats as unbounded — confirm intended.
            tasks: _DownloadEventQueue = asyncio.Queue(maxsize=ctx.max_download - 1)
            no_error_flag = asyncio.Event()
            no_error_flag.set()
            watcher_task = loop.create_task(
                _download_watcher(count, tasks, no_error_flag),
            )

            offset = 0
            page = query
            while offset < count:
                result = await db.execute(page)
                for event in result.unique().scalars():
                    if event.end is None:
                        continue

                    length = event.end - event.start
                    if length > ctx.length_cutoff:
                        _LOGGER.warning(
                            "Skipping event %s because it is too long (%s)",
                            event.id,
                            length,
                        )
                        # Report the skip to the watcher so its count lines up.
                        await tasks.put(QueuedDownload(task=None, args=[]))
                        continue
                    # ensure no tasks are currently in a retry state
                    await no_error_flag.wait()

                    # NOTE(review): smart-detect events filtered out here are
                    # never reported to the watcher — confirm _download_watcher
                    # tolerates fewer than `count` queue items.
                    if event.event_type in {
                        d.EventType.SMART_DETECT.value,
                        d.EventType.SMART_DETECT_LINE.value,
                    } and not event.smart_types.intersection(smart_types_set):
                        continue

                    task = loop.create_task(
                        _download_event(ctx, event, verify, force, pb),
                    )
                    # waits for a free processing slot
                    await tasks.put(
                        QueuedDownload(task=task, args=[ctx, event, verify, force, pb]),
                    )

                offset += ctx.page_size
                page = query.offset(offset)

            try:
                await watcher_task
                downloaded = watcher_task.result()
            except asyncio.CancelledError:
                downloaded = 0
            pb.update(task_id, completed=count)
            return count, downloaded
|
|
1025
|
+
|
|
1026
|
+
|
|
1027
|
+
async def _events(
    ctx: BackupContext,
    event_types: list[d.EventType],
    smart_types: list[d.SmartDetectObjectType],
    prune: bool,
    force: bool,
    verify: bool,
    no_input: bool,
) -> None:
    """Run the full event backup pipeline: prune, update, then download.

    Always closes the Protect session and disposes the database engine,
    even when an earlier step fails.
    """
    try:
        await ctx.create_db()

        # Pruning alongside --force is handled by _wipe_files before we get
        # here, so only prune for a plain --prune run.
        if prune and not force:
            _LOGGER.warning("Pruned %s old event(s)", await _prune_events(ctx))

        saved_start = ctx.start
        if not force:
            # Resume the metadata update from the newest event already stored.
            newest = await _newest_event(ctx)
            if newest is not None:
                ctx.start = newest.start

        _LOGGER.warning("Updated %s event(s)", await _update_events(ctx))
        ctx.start = saved_start

        total, fetched = await _download_events(
            ctx,
            event_types,
            smart_types,
            verify,
            force,
        )
        _LOGGER.warning(
            "Total events: %s. Verified %s existing event(s). Downloaded %s new event(s)",
            total,
            total - fetched,
            fetched,
        )
    finally:
        _LOGGER.debug("Cleaning up Protect connection/database...")
        await ctx.protect.close_session()
        await ctx.db_engine.dispose()
|
|
1068
|
+
|
|
1069
|
+
|
|
1070
|
+
@app.command(name="events")
def events_cmd(
    ctx: typer.Context,
    event_types: list[EventTypeChoice] = OPTION_EVENT_TYPES,
    smart_types: list[d.SmartDetectObjectType] = OPTION_SMART_TYPES,
    prune: bool = typer.Option(
        False,
        "-p",
        "--prune",
        # fixed help-text grammar: "then" -> "than"
        help="Prune events older than start.",
    ),
    force: bool = typer.Option(
        False,
        "-f",
        "--force",
        help="Force update all events and redownload all clips.",
    ),
    verify: bool = typer.Option(
        False,
        "-v",
        "--verify",
        help="Verifies files on disk.",
    ),
    no_input: bool = typer.Option(False, "--no-input"),
) -> None:
    """Backup thumbnails and video clips for camera events."""
    # suppress av logging messages
    av.logging.set_level(av.logging.PANIC)
    ufp_events = [d.EventType(e.value) for e in event_types]
    # --prune together with --force wipes everything up front; _events then
    # skips its own prune step for this combination.
    if prune and force:
        _wipe_files(ctx.obj, no_input)
    asyncio.run(
        _events(ctx.obj, ufp_events, smart_types, prune, force, verify, no_input),
    )
|