homesec 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- homesec/__init__.py +20 -0
- homesec/app.py +393 -0
- homesec/cli.py +159 -0
- homesec/config/__init__.py +18 -0
- homesec/config/loader.py +109 -0
- homesec/config/validation.py +82 -0
- homesec/errors.py +71 -0
- homesec/health/__init__.py +5 -0
- homesec/health/server.py +226 -0
- homesec/interfaces.py +249 -0
- homesec/logging_setup.py +176 -0
- homesec/maintenance/__init__.py +1 -0
- homesec/maintenance/cleanup_clips.py +632 -0
- homesec/models/__init__.py +79 -0
- homesec/models/alert.py +32 -0
- homesec/models/clip.py +71 -0
- homesec/models/config.py +362 -0
- homesec/models/events.py +184 -0
- homesec/models/filter.py +62 -0
- homesec/models/source.py +77 -0
- homesec/models/storage.py +12 -0
- homesec/models/vlm.py +99 -0
- homesec/pipeline/__init__.py +6 -0
- homesec/pipeline/alert_policy.py +5 -0
- homesec/pipeline/core.py +639 -0
- homesec/plugins/__init__.py +62 -0
- homesec/plugins/alert_policies/__init__.py +80 -0
- homesec/plugins/alert_policies/default.py +111 -0
- homesec/plugins/alert_policies/noop.py +60 -0
- homesec/plugins/analyzers/__init__.py +126 -0
- homesec/plugins/analyzers/openai.py +446 -0
- homesec/plugins/filters/__init__.py +124 -0
- homesec/plugins/filters/yolo.py +317 -0
- homesec/plugins/notifiers/__init__.py +80 -0
- homesec/plugins/notifiers/mqtt.py +189 -0
- homesec/plugins/notifiers/multiplex.py +106 -0
- homesec/plugins/notifiers/sendgrid_email.py +228 -0
- homesec/plugins/storage/__init__.py +116 -0
- homesec/plugins/storage/dropbox.py +272 -0
- homesec/plugins/storage/local.py +108 -0
- homesec/plugins/utils.py +63 -0
- homesec/py.typed +0 -0
- homesec/repository/__init__.py +5 -0
- homesec/repository/clip_repository.py +552 -0
- homesec/sources/__init__.py +17 -0
- homesec/sources/base.py +224 -0
- homesec/sources/ftp.py +209 -0
- homesec/sources/local_folder.py +238 -0
- homesec/sources/rtsp.py +1251 -0
- homesec/state/__init__.py +10 -0
- homesec/state/postgres.py +501 -0
- homesec/storage_paths.py +46 -0
- homesec/telemetry/__init__.py +0 -0
- homesec/telemetry/db/__init__.py +1 -0
- homesec/telemetry/db/log_table.py +16 -0
- homesec/telemetry/db_log_handler.py +246 -0
- homesec/telemetry/postgres_settings.py +42 -0
- homesec-0.1.0.dist-info/METADATA +446 -0
- homesec-0.1.0.dist-info/RECORD +62 -0
- homesec-0.1.0.dist-info/WHEEL +4 -0
- homesec-0.1.0.dist-info/entry_points.txt +2 -0
- homesec-0.1.0.dist-info/licenses/LICENSE +201 -0
|
@@ -0,0 +1,632 @@
|
|
|
1
|
+
"""Cleanup workflow for removing clips that appear empty after re-analysis.
|
|
2
|
+
|
|
3
|
+
This module is intended to be run via the HomeSec CLI (`homesec cleanup`).
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
import asyncio
|
|
9
|
+
import json
|
|
10
|
+
import logging
|
|
11
|
+
import time
|
|
12
|
+
import uuid
|
|
13
|
+
from dataclasses import dataclass
|
|
14
|
+
from datetime import datetime
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
|
|
17
|
+
from pydantic import BaseModel, Field
|
|
18
|
+
|
|
19
|
+
from homesec.config import load_config, resolve_env_var
|
|
20
|
+
from homesec.interfaces import ObjectFilter, StorageBackend
|
|
21
|
+
from homesec.models.clip import ClipStateData
|
|
22
|
+
from homesec.models.filter import FilterConfig, YoloFilterSettings
|
|
23
|
+
from homesec.plugins import discover_all_plugins
|
|
24
|
+
from homesec.plugins.filters import load_filter_plugin
|
|
25
|
+
from homesec.plugins.storage import create_storage
|
|
26
|
+
from homesec.repository.clip_repository import ClipRepository
|
|
27
|
+
from homesec.state.postgres import PostgresStateStore
|
|
28
|
+
|
|
29
|
+
# Module-level logger; _log_json routes structured JSON lines through it.
logger = logging.getLogger("homesec.cleanup_clips")

# Model used for the recheck pass when CleanupOptions.recheck_model_path is
# not supplied (see _build_recheck_filter_config).
_DEFAULT_RECHECK_MODEL = "yolo11x.pt"
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class CleanupOptions(BaseModel):
    """Options for the cleanup workflow (CLI-facing)."""

    # Path to the HomeSec config file loaded via load_config().
    config_path: Path

    # Only scan clips older than this many days (None = no age limit).
    older_than_days: int | None = None
    # Restrict the scan to one camera (None = all cameras).
    camera_name: str | None = None

    # Rows fetched per repository page while scanning for candidates.
    batch_size: int = Field(default=100, ge=1)
    # Concurrent candidate workers; also becomes the filter's max_workers.
    workers: int = Field(default=2, ge=1)
    # Default True: log intended actions without deleting or writing state.
    dry_run: bool = True

    # Recheck-filter overrides; each None falls back to the base filter
    # config (model_path instead falls back to _DEFAULT_RECHECK_MODEL).
    recheck_model_path: str | None = None
    recheck_min_confidence: float | None = None
    recheck_sample_fps: int | None = None
    recheck_min_box_h_ratio: float | None = None
    recheck_min_hits: int | None = None
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
@dataclass(frozen=True)
|
|
54
|
+
class _Counts:
|
|
55
|
+
scanned_rows: int = 0
|
|
56
|
+
candidates: int = 0
|
|
57
|
+
reanalyzed: int = 0
|
|
58
|
+
deleted: int = 0
|
|
59
|
+
false_negatives: int = 0
|
|
60
|
+
download_errors: int = 0
|
|
61
|
+
analyze_errors: int = 0
|
|
62
|
+
delete_errors: int = 0
|
|
63
|
+
state_errors: int = 0
|
|
64
|
+
|
|
65
|
+
def __add__(self, other: "_Counts") -> "_Counts":
|
|
66
|
+
return _Counts(
|
|
67
|
+
scanned_rows=self.scanned_rows + other.scanned_rows,
|
|
68
|
+
candidates=self.candidates + other.candidates,
|
|
69
|
+
reanalyzed=self.reanalyzed + other.reanalyzed,
|
|
70
|
+
deleted=self.deleted + other.deleted,
|
|
71
|
+
false_negatives=self.false_negatives + other.false_negatives,
|
|
72
|
+
download_errors=self.download_errors + other.download_errors,
|
|
73
|
+
analyze_errors=self.analyze_errors + other.analyze_errors,
|
|
74
|
+
delete_errors=self.delete_errors + other.delete_errors,
|
|
75
|
+
state_errors=self.state_errors + other.state_errors,
|
|
76
|
+
)
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def _safe_filename(value: str) -> str:
|
|
80
|
+
return value.replace("/", "_").replace(" ", "_")
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def _log_json(level: int, message: str, payload: dict[str, object]) -> None:
    """Log *payload* as one sorted-key JSON line at *level*.

    If the payload lacks a "message" key, *message* is injected under it;
    an existing "message" key wins and *message* is ignored.
    """
    body = payload if "message" in payload else {"message": message, **payload}
    logger.log(level, json.dumps(body, sort_keys=True))
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def _base_payload(
|
|
90
|
+
*,
|
|
91
|
+
run_id: str,
|
|
92
|
+
event: str,
|
|
93
|
+
clip_id: str | None = None,
|
|
94
|
+
camera_name: str | None = None,
|
|
95
|
+
created_at: datetime | None = None,
|
|
96
|
+
dry_run: bool | None = None,
|
|
97
|
+
status_before: str | None = None,
|
|
98
|
+
status_after: str | None = None,
|
|
99
|
+
) -> dict[str, object]:
|
|
100
|
+
payload: dict[str, object] = {"event": event, "run_id": run_id}
|
|
101
|
+
if clip_id is not None:
|
|
102
|
+
payload["clip_id"] = clip_id
|
|
103
|
+
if camera_name is not None:
|
|
104
|
+
payload["camera_name"] = camera_name
|
|
105
|
+
if created_at is not None:
|
|
106
|
+
payload["created_at"] = created_at.isoformat()
|
|
107
|
+
if dry_run is not None:
|
|
108
|
+
payload["dry_run"] = bool(dry_run)
|
|
109
|
+
if status_before is not None:
|
|
110
|
+
payload["status_before"] = status_before
|
|
111
|
+
if status_after is not None:
|
|
112
|
+
payload["status_after"] = status_after
|
|
113
|
+
return payload
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
def _recheck_settings(config: FilterConfig) -> dict[str, object]:
    """Summarize the YOLO recheck settings for the run-summary log event.

    Returns an empty dict for non-YOLO filter configurations.
    """
    settings = config.config
    if not isinstance(settings, YoloFilterSettings):
        return {}
    return {
        "model_path": str(settings.model_path),
        "min_confidence": float(settings.min_confidence),
        "sample_fps": int(settings.sample_fps),
        "min_box_h_ratio": float(settings.min_box_h_ratio),
        "min_hits": int(settings.min_hits),
        "classes": list(settings.classes),
    }
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def _build_recheck_filter_config(base: FilterConfig, opts: CleanupOptions) -> FilterConfig:
    """Derive the re-analysis filter config from *base* plus CLI overrides.

    The recheck model path is always replaced (falling back to
    _DEFAULT_RECHECK_MODEL when no override is given); the remaining
    settings keep their base values unless explicitly overridden.
    max_workers is set to the cleanup worker count.

    Raises:
        ValueError: If the base filter configuration is not YOLO-based.
    """
    if not isinstance(base.config, YoloFilterSettings):
        raise ValueError(f"Unsupported filter config type: {type(base.config).__name__}")
    settings = base.config.model_copy(deep=True)

    settings.model_path = opts.recheck_model_path or _DEFAULT_RECHECK_MODEL
    overrides = {
        "min_confidence": opts.recheck_min_confidence,
        "sample_fps": opts.recheck_sample_fps,
        "min_box_h_ratio": opts.recheck_min_box_h_ratio,
        "min_hits": opts.recheck_min_hits,
    }
    for attr, value in overrides.items():
        if value is not None:
            setattr(settings, attr, value)

    merged = base.model_copy(deep=True)
    merged.max_workers = int(opts.workers)
    merged.config = settings
    return merged
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
async def run_cleanup(opts: CleanupOptions) -> None:
    """Run the cleanup workflow.

    Pages through clips whose stored filter result exists but has no
    detected classes, re-analyzes each with the (typically heavier)
    recheck filter, and deletes clips that are still empty. With
    ``opts.dry_run`` (the default) every action is logged but nothing
    is deleted or written back. A JSON summary event is logged at the
    end of a successful scan.

    Raises:
        RuntimeError: If no Postgres DSN is configured or the state
            store fails to initialize.
    """
    run_id = str(uuid.uuid4())

    cfg = load_config(opts.config_path)

    # Discover all plugins (built-in and external)
    discover_all_plugins()

    # Resolve the Postgres DSN; an env-var indirection wins over the literal.
    state_cfg = cfg.state_store
    dsn = state_cfg.dsn
    if state_cfg.dsn_env:
        dsn = resolve_env_var(state_cfg.dsn_env)
    if not dsn:
        raise RuntimeError("Postgres DSN is required for cleanup")

    storage = create_storage(cfg.storage)
    state_store = PostgresStateStore(dsn)
    ok = await state_store.initialize()
    if not ok:
        raise RuntimeError("Failed to initialize Postgres state store")

    event_store = state_store.create_event_store()
    repo = ClipRepository(state_store, event_store, retry=cfg.retry)

    recheck_cfg = _build_recheck_filter_config(cfg.filter, opts)
    filter_plugin = load_filter_plugin(recheck_cfg)

    # Bounds concurrent per-candidate work (download + re-analysis).
    sem = asyncio.Semaphore(int(opts.workers))

    # Per-run scratch directory for clips fetched from storage.
    cache_dir = Path.cwd() / "video_cache" / "cleanup" / run_id
    cache_dir.mkdir(parents=True, exist_ok=True)

    totals = _Counts()

    try:
        # Keyset pagination cursor: (created_at, clip_id) of the last row seen.
        cursor: tuple[datetime, str] | None = None
        while True:
            rows = await repo.list_candidate_clips_for_cleanup(
                older_than_days=opts.older_than_days,
                camera_name=opts.camera_name,
                batch_size=int(opts.batch_size),
                cursor=cursor,
            )
            if not rows:
                break
            last_clip_id, _last_state, last_created_at = rows[-1]
            cursor = (last_created_at, last_clip_id)

            totals = totals + _Counts(scanned_rows=len(rows))

            # Only clips with a filter result that detected nothing qualify.
            candidates: list[tuple[str, ClipStateData, datetime]] = [
                (clip_id, state, created_at)
                for clip_id, state, created_at in rows
                if state.filter_result is not None
                and not state.filter_result.detected_classes
            ]
            totals = totals + _Counts(candidates=len(candidates))

            tasks = [
                asyncio.create_task(
                    _process_candidate(
                        clip_id=clip_id,
                        state=state,
                        created_at=created_at,
                        recheck_cfg=recheck_cfg,
                        filter_plugin=filter_plugin,
                        storage=storage,
                        repo=repo,
                        cache_dir=cache_dir,
                        sem=sem,
                        dry_run=bool(opts.dry_run),
                        run_id=run_id,
                    )
                )
                for clip_id, state, created_at in candidates
            ]
            if tasks:
                results = await asyncio.gather(*tasks)
                for c in results:
                    totals = totals + c

        summary_payload = _base_payload(
            run_id=run_id,
            event="cleanup.summary",
            dry_run=bool(opts.dry_run),
        )
        summary_payload.update(
            {
                "scanned_rows": totals.scanned_rows,
                "candidates": totals.candidates,
                "reanalyzed": totals.reanalyzed,
                "deleted": totals.deleted,
                "false_negatives": totals.false_negatives,
                "download_errors": totals.download_errors,
                "analyze_errors": totals.analyze_errors,
                "delete_errors": totals.delete_errors,
                "state_errors": totals.state_errors,
                "filters": {
                    "older_than_days": opts.older_than_days,
                    "camera_name": opts.camera_name,
                },
                "recheck_settings": _recheck_settings(recheck_cfg),
                "batch_size": int(opts.batch_size),
                "workers": int(opts.workers),
            }
        )
        _log_json(logging.INFO, "Cleanup summary", summary_payload)
    finally:
        try:
            await filter_plugin.shutdown()
        finally:
            await storage.shutdown()
            await state_store.shutdown()
            # BUGFIX: remove the per-run scratch directory. Downloaded files
            # are unlinked per candidate, so the directory is normally empty;
            # previously one leftover directory accumulated on every run.
            try:
                cache_dir.rmdir()
            except OSError:
                # Not empty (a failed unlink) or already gone — leave it.
                pass
|
|
269
|
+
|
|
270
|
+
|
|
271
|
+
async def _process_candidate(
    *,
    clip_id: str,
    state: ClipStateData,
    created_at: datetime,
    recheck_cfg: FilterConfig,
    filter_plugin: ObjectFilter,
    storage: StorageBackend,
    repo: ClipRepository,
    cache_dir: Path,
    sem: asyncio.Semaphore,
    dry_run: bool,
    run_id: str,
) -> _Counts:
    """Re-analyze one candidate clip and delete it if it is still empty.

    Runs entirely under ``sem`` to bound concurrency. Outline:

    1. Locate the video: use ``state.local_path`` if it exists, otherwise
       download from ``state.storage_uri`` into ``cache_dir``.
    2. Re-run ``filter_plugin`` on it (the downloaded scratch copy is
       removed afterwards, whatever the outcome).
    3. If objects were detected, keep the clip and record the recheck
       (unless ``dry_run``); otherwise delete the local file and storage
       object and record the deletion (dry-run only logs).

    Every outcome — including each failure mode — is emitted as a
    structured JSON log event and reflected in the returned ``_Counts``
    delta for aggregation by ``run_cleanup``.
    """
    async with sem:
        status_before = state.status
        prior_filter = state.filter_result

        local_path = Path(state.local_path)
        local_path_str = str(local_path)
        storage_uri = state.storage_uri
        video_path = local_path
        downloaded_path: Path | None = None
        download_ms: int | None = None
        analyze_ms: int | None = None

        # --- Step 1: make sure a local video file exists to analyze. ---
        try:
            if not video_path.exists():
                if state.storage_uri is None:
                    # Nothing on disk and nowhere to fetch from: unrecoverable.
                    payload = _base_payload(
                        run_id=run_id,
                        event="cleanup.error",
                        clip_id=clip_id,
                        camera_name=state.camera_name,
                        created_at=created_at,
                        dry_run=dry_run,
                        status_before=status_before,
                    )
                    payload.update(
                        {
                            "error_code": "missing_local_and_storage_uri",
                            "local_path": local_path_str,
                            "storage_uri": storage_uri,
                        }
                    )
                    _log_json(
                        logging.WARNING,
                        "Cleanup error: missing local file and storage URI",
                        payload,
                    )
                    return _Counts(download_errors=1)

                # Fetch the clip into the per-run scratch directory.
                suffix = local_path.suffix or ".mp4"
                downloaded_path = cache_dir / f"{_safe_filename(clip_id)}{suffix}"
                download_start = time.monotonic()
                await storage.get(state.storage_uri, downloaded_path)
                download_ms = int((time.monotonic() - download_start) * 1000)
                video_path = downloaded_path
        except Exception as exc:
            payload = _base_payload(
                run_id=run_id,
                event="cleanup.error",
                clip_id=clip_id,
                camera_name=state.camera_name,
                created_at=created_at,
                dry_run=dry_run,
                status_before=status_before,
            )
            payload.update(
                {
                    "error_code": "download_failed",
                    "error_detail": str(exc),
                    "local_path": local_path_str,
                    "storage_uri": storage_uri,
                }
            )
            if download_ms is not None:
                payload["download_ms"] = download_ms
            _log_json(logging.WARNING, "Cleanup error: download failed", payload)
            return _Counts(download_errors=1)

        # --- Step 2: re-run the object filter on the video. ---
        try:
            analyze_start = time.monotonic()
            result = await filter_plugin.detect(video_path)
            analyze_ms = int((time.monotonic() - analyze_start) * 1000)
        except Exception as exc:
            # analyze_start is always bound: it is the try's first statement.
            if analyze_ms is None:
                analyze_ms = int((time.monotonic() - analyze_start) * 1000)
            payload = _base_payload(
                run_id=run_id,
                event="cleanup.error",
                clip_id=clip_id,
                camera_name=state.camera_name,
                created_at=created_at,
                dry_run=dry_run,
                status_before=status_before,
            )
            payload.update(
                {
                    "error_code": "reanalyze_failed",
                    "error_detail": str(exc),
                    "local_path": local_path_str,
                    "storage_uri": storage_uri,
                }
            )
            if download_ms is not None:
                payload["download_ms"] = download_ms
            payload["reanalyze_ms"] = analyze_ms
            _log_json(logging.WARNING, "Cleanup error: reanalysis failed", payload)
            return _Counts(reanalyzed=1, analyze_errors=1)
        finally:
            # Best-effort removal of the downloaded scratch copy.
            if downloaded_path is not None:
                try:
                    downloaded_path.unlink(missing_ok=True)
                except Exception:
                    pass

        # Snapshot of the recheck outcome and the settings that produced it;
        # embedded in the log events emitted below.
        recheck_result = result
        recheck = {
            "detected_classes": list(recheck_result.detected_classes),
            "confidence": float(recheck_result.confidence),
            "model": str(recheck_result.model),
            "sampled_frames": int(recheck_result.sampled_frames),
            "settings": {
                "model_path": str(getattr(recheck_cfg.config, "model_path", "")),
                "min_confidence": float(getattr(recheck_cfg.config, "min_confidence", 0.0)),
                "sample_fps": int(getattr(recheck_cfg.config, "sample_fps", 0)),
                "min_box_h_ratio": float(getattr(recheck_cfg.config, "min_box_h_ratio", 0.0)),
                "min_hits": int(getattr(recheck_cfg.config, "min_hits", 0)),
            },
        }

        # --- Step 3a: recheck found objects — the clip was a false negative. ---
        if recheck_result.detected_classes:
            payload = _base_payload(
                run_id=run_id,
                event="cleanup.skipped_with_detection",
                clip_id=clip_id,
                camera_name=state.camera_name,
                created_at=created_at,
                dry_run=dry_run,
                status_before=status_before,
                status_after=status_before,
            )
            payload.update(
                {
                    "prior_filter": prior_filter.model_dump(mode="json") if prior_filter else None,
                    "recheck_filter": recheck,
                }
            )
            if download_ms is not None:
                payload["download_ms"] = download_ms
            if analyze_ms is not None:
                payload["reanalyze_ms"] = analyze_ms
            _log_json(logging.INFO, "Cleanup skipped: detection found", payload)
            if dry_run:
                return _Counts(reanalyzed=1, false_negatives=1)
            try:
                await repo.record_clip_rechecked(
                    clip_id,
                    result=recheck_result,
                    prior_filter=prior_filter,
                    reason="cleanup_cli",
                    run_id=run_id,
                )
            except Exception as exc:
                payload = _base_payload(
                    run_id=run_id,
                    event="cleanup.error",
                    clip_id=clip_id,
                    camera_name=state.camera_name,
                    created_at=created_at,
                    dry_run=dry_run,
                    status_before=status_before,
                )
                payload.update(
                    {
                        "error_code": "state_update_failed",
                        "error_detail": str(exc),
                        "local_path": local_path_str,
                        "storage_uri": storage_uri,
                    }
                )
                if download_ms is not None:
                    payload["download_ms"] = download_ms
                if analyze_ms is not None:
                    payload["reanalyze_ms"] = analyze_ms
                _log_json(logging.WARNING, "Cleanup error: state update failed", payload)
                return _Counts(reanalyzed=1, false_negatives=1, state_errors=1)
            return _Counts(reanalyzed=1, false_negatives=1)

        # Still empty after recheck.
        delete_local_attempted = local_path.exists()
        delete_storage_attempted = state.storage_uri is not None

        # --- Step 3b: dry run — log what would be deleted and stop. ---
        if dry_run:
            payload = _base_payload(
                run_id=run_id,
                event="cleanup.deleted",
                clip_id=clip_id,
                camera_name=state.camera_name,
                created_at=created_at,
                dry_run=dry_run,
                status_before=status_before,
                status_after=status_before,
            )
            payload.update(
                {
                    "local_path": local_path_str,
                    "storage_uri": storage_uri,
                    "prior_filter": prior_filter.model_dump(mode="json") if prior_filter else None,
                    "recheck_filter": recheck,
                    "delete": {
                        "local": {"attempted": delete_local_attempted, "ok": None, "error": None},
                        "storage": {
                            "attempted": delete_storage_attempted,
                            "ok": None,
                            "error": None,
                        },
                    },
                }
            )
            if download_ms is not None:
                payload["download_ms"] = download_ms
            if analyze_ms is not None:
                payload["reanalyze_ms"] = analyze_ms
            _log_json(logging.WARNING, "Cleanup dry-run: would delete empty clip", payload)
            return _Counts(reanalyzed=1)

        # --- Step 3c: actually delete the local file and storage object. ---
        delete_ms: int | None = None
        delete_start = time.monotonic()

        delete_local_ok = True
        delete_local_err: str | None = None
        if delete_local_attempted:
            try:
                local_path.unlink(missing_ok=True)
            except Exception as exc:
                delete_local_ok = False
                delete_local_err = str(exc)

        delete_storage_ok = True
        delete_storage_err: str | None = None
        if state.storage_uri is not None:
            try:
                await storage.delete(state.storage_uri)
            except Exception as exc:
                delete_storage_ok = False
                delete_storage_err = str(exc)

        delete_ms = int((time.monotonic() - delete_start) * 1000)
        # Local deletion is verified by a fresh exists() check; storage
        # deletion is trusted if the backend call did not raise.
        deleted_local = not local_path.exists()
        deleted_storage = True if state.storage_uri is None else delete_storage_ok

        if not delete_local_ok or not delete_storage_ok:
            payload = _base_payload(
                run_id=run_id,
                event="cleanup.error",
                clip_id=clip_id,
                camera_name=state.camera_name,
                created_at=created_at,
                dry_run=dry_run,
                status_before=status_before,
            )
            payload.update(
                {
                    "error_code": "delete_failed",
                    "local_path": local_path_str,
                    "storage_uri": storage_uri,
                    "delete": {
                        "local": {
                            "attempted": delete_local_attempted,
                            "ok": delete_local_ok,
                            "error": delete_local_err,
                        },
                        "storage": {
                            "attempted": delete_storage_attempted,
                            "ok": delete_storage_ok,
                            "error": delete_storage_err,
                        },
                    },
                }
            )
            if download_ms is not None:
                payload["download_ms"] = download_ms
            if analyze_ms is not None:
                payload["reanalyze_ms"] = analyze_ms
            payload["delete_ms"] = delete_ms
            _log_json(logging.WARNING, "Cleanup error: delete failed", payload)
            return _Counts(reanalyzed=1, delete_errors=1)

        # Record the deletion in the state store.
        try:
            await repo.record_clip_deleted(
                clip_id,
                reason="cleanup_cli",
                run_id=run_id,
                deleted_local=deleted_local,
                deleted_storage=deleted_storage,
            )
        except Exception as exc:
            payload = _base_payload(
                run_id=run_id,
                event="cleanup.error",
                clip_id=clip_id,
                camera_name=state.camera_name,
                created_at=created_at,
                dry_run=dry_run,
                status_before=status_before,
            )
            payload.update(
                {
                    "error_code": "state_update_failed",
                    "error_detail": str(exc),
                    "local_path": local_path_str,
                    "storage_uri": storage_uri,
                }
            )
            if download_ms is not None:
                payload["download_ms"] = download_ms
            if analyze_ms is not None:
                payload["reanalyze_ms"] = analyze_ms
            payload["delete_ms"] = delete_ms
            _log_json(logging.WARNING, "Cleanup error: state update failed", payload)
            return _Counts(reanalyzed=1, state_errors=1)

        payload = _base_payload(
            run_id=run_id,
            event="cleanup.deleted",
            clip_id=clip_id,
            camera_name=state.camera_name,
            created_at=created_at,
            dry_run=dry_run,
            status_before=status_before,
            status_after="deleted",
        )
        payload.update(
            {
                "local_path": local_path_str,
                "storage_uri": storage_uri,
                "prior_filter": prior_filter.model_dump(mode="json") if prior_filter else None,
                "recheck_filter": recheck,
                "delete": {
                    "local": {
                        "attempted": delete_local_attempted,
                        "ok": delete_local_ok,
                        "error": delete_local_err,
                    },
                    "storage": {
                        "attempted": delete_storage_attempted,
                        "ok": delete_storage_ok,
                        "error": delete_storage_err,
                    },
                },
            }
        )
        if download_ms is not None:
            payload["download_ms"] = download_ms
        if analyze_ms is not None:
            payload["reanalyze_ms"] = analyze_ms
        payload["delete_ms"] = delete_ms
        # NOTE(review): the success event is logged at WARNING, matching the
        # other delete-path logs — confirm this is intentional rather than INFO.
        _log_json(logging.WARNING, "Cleanup deleted empty clip", payload)

        return _Counts(reanalyzed=1, deleted=1)
|