homesec 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- homesec/__init__.py +20 -0
- homesec/app.py +393 -0
- homesec/cli.py +159 -0
- homesec/config/__init__.py +18 -0
- homesec/config/loader.py +109 -0
- homesec/config/validation.py +82 -0
- homesec/errors.py +71 -0
- homesec/health/__init__.py +5 -0
- homesec/health/server.py +226 -0
- homesec/interfaces.py +249 -0
- homesec/logging_setup.py +176 -0
- homesec/maintenance/__init__.py +1 -0
- homesec/maintenance/cleanup_clips.py +632 -0
- homesec/models/__init__.py +79 -0
- homesec/models/alert.py +32 -0
- homesec/models/clip.py +71 -0
- homesec/models/config.py +362 -0
- homesec/models/events.py +184 -0
- homesec/models/filter.py +62 -0
- homesec/models/source.py +77 -0
- homesec/models/storage.py +12 -0
- homesec/models/vlm.py +99 -0
- homesec/pipeline/__init__.py +6 -0
- homesec/pipeline/alert_policy.py +5 -0
- homesec/pipeline/core.py +639 -0
- homesec/plugins/__init__.py +62 -0
- homesec/plugins/alert_policies/__init__.py +80 -0
- homesec/plugins/alert_policies/default.py +111 -0
- homesec/plugins/alert_policies/noop.py +60 -0
- homesec/plugins/analyzers/__init__.py +126 -0
- homesec/plugins/analyzers/openai.py +446 -0
- homesec/plugins/filters/__init__.py +124 -0
- homesec/plugins/filters/yolo.py +317 -0
- homesec/plugins/notifiers/__init__.py +80 -0
- homesec/plugins/notifiers/mqtt.py +189 -0
- homesec/plugins/notifiers/multiplex.py +106 -0
- homesec/plugins/notifiers/sendgrid_email.py +228 -0
- homesec/plugins/storage/__init__.py +116 -0
- homesec/plugins/storage/dropbox.py +272 -0
- homesec/plugins/storage/local.py +108 -0
- homesec/plugins/utils.py +63 -0
- homesec/py.typed +0 -0
- homesec/repository/__init__.py +5 -0
- homesec/repository/clip_repository.py +552 -0
- homesec/sources/__init__.py +17 -0
- homesec/sources/base.py +224 -0
- homesec/sources/ftp.py +209 -0
- homesec/sources/local_folder.py +238 -0
- homesec/sources/rtsp.py +1251 -0
- homesec/state/__init__.py +10 -0
- homesec/state/postgres.py +501 -0
- homesec/storage_paths.py +46 -0
- homesec/telemetry/__init__.py +0 -0
- homesec/telemetry/db/__init__.py +1 -0
- homesec/telemetry/db/log_table.py +16 -0
- homesec/telemetry/db_log_handler.py +246 -0
- homesec/telemetry/postgres_settings.py +42 -0
- homesec-0.1.0.dist-info/METADATA +446 -0
- homesec-0.1.0.dist-info/RECORD +62 -0
- homesec-0.1.0.dist-info/WHEEL +4 -0
- homesec-0.1.0.dist-info/entry_points.txt +2 -0
- homesec-0.1.0.dist-info/licenses/LICENSE +201 -0
|
@@ -0,0 +1,501 @@
|
|
|
1
|
+
"""Postgres implementation of StateStore and EventStore."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import logging
|
|
7
|
+
from datetime import datetime
|
|
8
|
+
from typing import Any, cast
|
|
9
|
+
|
|
10
|
+
from sqlalchemy import (
|
|
11
|
+
BigInteger,
|
|
12
|
+
DateTime,
|
|
13
|
+
ForeignKey,
|
|
14
|
+
Index,
|
|
15
|
+
Table,
|
|
16
|
+
Text,
|
|
17
|
+
and_,
|
|
18
|
+
func,
|
|
19
|
+
or_,
|
|
20
|
+
select,
|
|
21
|
+
)
|
|
22
|
+
from sqlalchemy.dialects.postgresql import JSONB, insert as pg_insert
|
|
23
|
+
from sqlalchemy.exc import DBAPIError, OperationalError
|
|
24
|
+
from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine, create_async_engine
|
|
25
|
+
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
|
|
26
|
+
|
|
27
|
+
from homesec.models.clip import ClipStateData
|
|
28
|
+
from homesec.models.events import (
|
|
29
|
+
ClipDeletedEvent,
|
|
30
|
+
ClipRecheckedEvent,
|
|
31
|
+
ClipEvent as ClipEventModel,
|
|
32
|
+
ClipLifecycleEvent,
|
|
33
|
+
ClipRecordedEvent,
|
|
34
|
+
UploadStartedEvent,
|
|
35
|
+
UploadCompletedEvent,
|
|
36
|
+
UploadFailedEvent,
|
|
37
|
+
FilterStartedEvent,
|
|
38
|
+
FilterCompletedEvent,
|
|
39
|
+
FilterFailedEvent,
|
|
40
|
+
VLMStartedEvent,
|
|
41
|
+
VLMCompletedEvent,
|
|
42
|
+
VLMFailedEvent,
|
|
43
|
+
VLMSkippedEvent,
|
|
44
|
+
AlertDecisionMadeEvent,
|
|
45
|
+
NotificationSentEvent,
|
|
46
|
+
NotificationFailedEvent,
|
|
47
|
+
)
|
|
48
|
+
from homesec.interfaces import EventStore, StateStore
|
|
49
|
+
|
|
50
|
+
logger = logging.getLogger(__name__)
|
|
51
|
+
|
|
52
|
+
# Maps the persisted `event_type` discriminator (the string stored in the
# clip_events.event_type column) back to the Pydantic event model class.
# PostgresEventStore.get_events uses this to revive rows; unknown types are
# logged and skipped there.
_EVENT_TYPE_MAP: dict[str, type[ClipEventModel]] = {
    "clip_recorded": ClipRecordedEvent,
    "clip_deleted": ClipDeletedEvent,
    "clip_rechecked": ClipRecheckedEvent,
    "upload_started": UploadStartedEvent,
    "upload_completed": UploadCompletedEvent,
    "upload_failed": UploadFailedEvent,
    "filter_started": FilterStartedEvent,
    "filter_completed": FilterCompletedEvent,
    "filter_failed": FilterFailedEvent,
    "vlm_started": VLMStartedEvent,
    "vlm_completed": VLMCompletedEvent,
    "vlm_failed": VLMFailedEvent,
    "vlm_skipped": VLMSkippedEvent,
    "alert_decision_made": AlertDecisionMadeEvent,
    "notification_sent": NotificationSentEvent,
    "notification_failed": NotificationFailedEvent,
}
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
class Base(DeclarativeBase):
    """Declarative base shared by the ORM models in this module."""
    pass
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
class ClipState(Base):
    """Current state snapshot (lightweight, fast queries)."""
    __tablename__ = "clip_states"

    # One row per clip; the clip id is the natural primary key.
    clip_id: Mapped[str] = mapped_column(Text, primary_key=True)
    # Full ClipStateData snapshot serialized to JSONB (see PostgresStateStore.upsert).
    data: Mapped[dict[str, Any]] = mapped_column(JSONB, nullable=False)
    # Row creation time, assigned server-side on insert; also the sort key
    # for cleanup keyset pagination.
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        nullable=False,
    )
    # onupdate only fires for ORM updates; the core upsert in
    # PostgresStateStore.upsert sets updated_at explicitly instead.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
    )

    # Expression indexes over JSONB fields backing status/camera filters
    # (used by the cleanup candidate query).
    __table_args__ = (
        Index("idx_clip_states_status", func.jsonb_extract_path_text(data, "status")),
        Index("idx_clip_states_camera", func.jsonb_extract_path_text(data, "camera_name")),
    )
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
class ClipEvent(Base):
    """Event history (append-only audit log)."""
    __tablename__ = "clip_events"

    # Monotonic surrogate key; doubles as the cursor for get_events(after_id=...).
    id: Mapped[int] = mapped_column(BigInteger, primary_key=True, autoincrement=True)
    # Events are deleted together with their clip_states row (ON DELETE CASCADE).
    clip_id: Mapped[str] = mapped_column(
        Text,
        ForeignKey("clip_states.clip_id", ondelete="CASCADE"),
        nullable=False,
    )
    # Event time supplied by the producer (PostgresEventStore.append writes
    # event.timestamp, not the insert time).
    timestamp: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
    )
    # Discriminator string; must match a key of _EVENT_TYPE_MAP to be revived.
    event_type: Mapped[str] = mapped_column(Text, nullable=False)
    # Remaining event model fields (model minus id/event_type) as JSONB.
    event_data: Mapped[dict[str, Any]] = mapped_column(JSONB, nullable=False)

    __table_args__ = (
        Index("idx_clip_events_clip_id", "clip_id"),
        # Composite index supports the (clip_id, id > after_id) cursor query.
        Index("idx_clip_events_clip_id_id", "clip_id", "id"),
        Index("idx_clip_events_timestamp", "timestamp"),
        Index("idx_clip_events_type", "event_type"),
    )
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
def _normalize_async_dsn(dsn: str) -> str:
|
|
127
|
+
if "+asyncpg" in dsn:
|
|
128
|
+
return dsn
|
|
129
|
+
if dsn.startswith("postgresql://"):
|
|
130
|
+
return dsn.replace("postgresql://", "postgresql+asyncpg://", 1)
|
|
131
|
+
if dsn.startswith("postgres://"):
|
|
132
|
+
return dsn.replace("postgres://", "postgresql+asyncpg://", 1)
|
|
133
|
+
return dsn
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
class PostgresStateStore(StateStore):
    """Postgres implementation of StateStore interface.

    Implements graceful degradation: operations return None/False
    instead of raising when DB is unavailable. The exception is
    upsert(), which deliberately raises on execution errors so callers
    can retry.
    """

    def __init__(self, dsn: str) -> None:
        """Initialize state store.

        Args:
            dsn: Postgres connection string (e.g., "postgresql+asyncpg://user:pass@host/db").
                 Plain postgresql:// / postgres:// schemes are rewritten to asyncpg.
        """
        self._dsn = _normalize_async_dsn(dsn)
        # None until initialize() succeeds; every method checks this first.
        self._engine: AsyncEngine | None = None

    async def initialize(self) -> bool:
        """Initialize connection pool and create table if not exists.

        Returns:
            True if initialization succeeded, False otherwise (engine is
            disposed and reset to None on failure).
        """
        try:
            self._engine = create_async_engine(
                self._dsn,
                # Validate pooled connections before use to survive restarts.
                pool_pre_ping=True,
                # Small fixed pool; max_overflow=0 caps connections at pool_size.
                pool_size=5,
                max_overflow=0,
            )
            async with self._engine.begin() as conn:
                await self._create_tables(conn)
            logger.info("PostgresStateStore initialized successfully")
            return True
        except Exception as e:
            logger.error(
                "Failed to initialize PostgresStateStore: %s", e, exc_info=True
            )
            if self._engine is not None:
                await self._engine.dispose()
                self._engine = None
            return False

    async def _create_tables(self, conn: AsyncConnection) -> None:
        """Create all tables (clip_states + clip_events)."""
        await conn.run_sync(Base.metadata.create_all)

    async def upsert(self, clip_id: str, data: ClipStateData) -> None:
        """Insert or update clip state.

        Raises on execution errors so callers can retry/log appropriately.
        Silently no-ops (with a warning) if the store was never initialized.
        """
        if self._engine is None:
            logger.warning("StateStore not initialized, skipping upsert for %s", clip_id)
            return

        json_data = data.model_dump(mode="json")
        table = cast(Table, ClipState.__table__)
        stmt = pg_insert(table).values(
            clip_id=clip_id,
            data=json_data,
            updated_at=func.now(),
        )
        # ON CONFLICT (clip_id) DO UPDATE: replace the snapshot and bump
        # updated_at; created_at keeps its original server default.
        stmt = stmt.on_conflict_do_update(
            index_elements=[table.c.clip_id],
            set_={"data": stmt.excluded.data, "updated_at": func.now()},
        )
        async with self._engine.begin() as conn:
            await conn.execute(stmt)

    async def get(self, clip_id: str) -> ClipStateData | None:
        """Retrieve clip state.

        Graceful degradation: returns None if DB unavailable or error occurs
        (including a row whose payload fails Pydantic validation).
        """
        if self._engine is None:
            logger.warning("StateStore not initialized, returning None for %s", clip_id)
            return None

        try:
            async with self._engine.connect() as conn:
                result = await conn.execute(
                    select(ClipState.data).where(ClipState.clip_id == clip_id)
                )
                raw = result.scalar_one_or_none()
                if raw is None:
                    return None

                # Parse JSON and validate with Pydantic
                data_dict = self._parse_state_data(raw)
                return ClipStateData.model_validate(data_dict)
        except Exception as e:
            logger.error(
                "Failed to get clip state for %s: %s",
                clip_id,
                e,
                exc_info=True,
            )
            return None

    async def list_candidate_clips_for_cleanup(
        self,
        *,
        older_than_days: int | None,
        camera_name: str | None,
        batch_size: int,
        cursor: tuple[datetime, str] | None = None,
    ) -> list[tuple[str, ClipStateData, datetime]]:
        """List clip states to scan for cleanup.

        Uses keyset pagination (cursor) instead of OFFSET so that the caller can
        safely update rows (e.g., mark clips deleted) without skipping entries.

        Cursor is `(created_at, clip_id)` from the last row of the previous page.

        Raises:
            ValueError: if batch_size < 1 or older_than_days < 0.
            Database errors propagate (no try/except here, unlike get()).
        """
        if self._engine is None:
            logger.warning("StateStore not initialized, returning empty cleanup candidate list")
            return []

        if batch_size < 1:
            raise ValueError("batch_size must be >= 1")
        if older_than_days is not None and older_than_days < 0:
            raise ValueError("older_than_days must be >= 0")

        # JSONB field accessors used for filtering (backed by the expression
        # indexes declared on ClipState).
        status_expr = func.jsonb_extract_path_text(ClipState.data, "status")
        camera_expr = func.jsonb_extract_path_text(ClipState.data, "camera_name")

        # Skip rows already marked deleted; a missing status counts as live.
        conditions = [or_(status_expr.is_(None), status_expr != "deleted")]
        if camera_name is not None:
            conditions.append(camera_expr == camera_name)
        if older_than_days is not None:
            conditions.append(
                ClipState.created_at
                < func.now() - func.make_interval(days=int(older_than_days))
            )

        if cursor is not None:
            # Keyset predicate: rows strictly after (created_at, clip_id)
            # in the same order the query sorts by.
            after_created_at, after_clip_id = cursor
            conditions.append(
                or_(
                    ClipState.created_at > after_created_at,
                    and_(
                        ClipState.created_at == after_created_at,
                        ClipState.clip_id > after_clip_id,
                    ),
                )
            )

        query = (
            select(ClipState.clip_id, ClipState.data, ClipState.created_at)
            .where(and_(*conditions))
            .order_by(ClipState.created_at.asc(), ClipState.clip_id.asc())
            .limit(int(batch_size))
        )

        async with self._engine.connect() as conn:
            result = await conn.execute(query)
            rows = result.all()

        items: list[tuple[str, ClipStateData, datetime]] = []
        for clip_id, raw, created_at in rows:
            try:
                data_dict = self._parse_state_data(raw)
                state = ClipStateData.model_validate(data_dict)
            except Exception as exc:
                # Unparseable rows are skipped (may shorten the page below
                # batch_size), not fatal to the whole scan.
                logger.warning(
                    "Failed parsing clip state for cleanup: %s error=%s",
                    clip_id,
                    exc,
                    exc_info=True,
                )
                continue

            items.append((clip_id, state, created_at))

        return items

    async def ping(self) -> bool:
        """Health check.

        Returns True if database is reachable, False otherwise.
        """
        if self._engine is None:
            return False

        try:
            async with self._engine.connect() as conn:
                await conn.execute(select(1))
            return True
        except Exception as e:
            logger.warning("Database ping failed: %s", e, exc_info=True)
            return False

    async def shutdown(self, timeout: float | None = None) -> None:
        """Close connection pool. `timeout` is accepted for interface parity but unused."""
        _ = timeout
        if self._engine is not None:
            await self._engine.dispose()
            self._engine = None
            logger.info("PostgresStateStore closed")

    @staticmethod
    def _parse_state_data(raw: object) -> dict[str, Any]:
        """Parse JSONB payload from SQLAlchemy into a dict."""
        return _parse_jsonb_payload(raw)

    def create_event_store(self) -> "PostgresEventStore | NoopEventStore":
        """Create a Postgres-backed event store or a no-op fallback.

        The returned PostgresEventStore shares this store's engine.
        """
        if self._engine is None:
            return NoopEventStore()
        return PostgresEventStore(self._engine)
|
|
346
|
+
|
|
347
|
+
|
|
348
|
+
def _parse_jsonb_payload(raw: object) -> dict[str, Any]:
|
|
349
|
+
"""Parse JSONB payload from SQLAlchemy into a dict."""
|
|
350
|
+
match raw:
|
|
351
|
+
case dict():
|
|
352
|
+
return cast(dict[str, Any], raw)
|
|
353
|
+
case str():
|
|
354
|
+
return cast(dict[str, Any], json.loads(raw))
|
|
355
|
+
case bytes() | bytearray():
|
|
356
|
+
return cast(dict[str, Any], json.loads(raw.decode("utf-8")))
|
|
357
|
+
case _:
|
|
358
|
+
raise TypeError(f"Unsupported JSONB payload type: {type(raw).__name__}")
|
|
359
|
+
|
|
360
|
+
|
|
361
|
+
# SQLSTATE codes treated as transient by is_retryable_pg_error(): the
# connection class (08xxx), retry-safe concurrency failures (40xxx), and
# server shutdown/overload conditions (53xxx/57xxx).
_RETRYABLE_SQLSTATES = {
    "08000",  # connection_exception
    "08003",  # connection_does_not_exist
    "08006",  # connection_failure
    "08007",  # transaction_resolution_unknown
    "08001",  # sqlclient_unable_to_establish_sqlconnection
    "08004",  # sqlserver_rejected_establishment_of_sqlconnection
    "40P01",  # deadlock_detected
    "40001",  # serialization_failure
    "53300",  # too_many_connections
    "57P01",  # admin_shutdown
    "57P02",  # crash_shutdown
    "57P03",  # cannot_connect_now
}
|
|
375
|
+
|
|
376
|
+
|
|
377
|
+
def _extract_sqlstate(exc: BaseException) -> str | None:
|
|
378
|
+
for candidate in (exc, getattr(exc, "orig", None)):
|
|
379
|
+
if candidate is None:
|
|
380
|
+
continue
|
|
381
|
+
sqlstate = getattr(candidate, "sqlstate", None) or getattr(candidate, "pgcode", None)
|
|
382
|
+
if sqlstate:
|
|
383
|
+
return str(sqlstate)
|
|
384
|
+
return None
|
|
385
|
+
|
|
386
|
+
|
|
387
|
+
def is_retryable_pg_error(exc: Exception) -> bool:
    """Return True if the exception is likely a transient Postgres error.

    Retryable means: any SQLAlchemy OperationalError, any DBAPIError that
    invalidated its connection, or any exception carrying one of the
    SQLSTATE codes in _RETRYABLE_SQLSTATES.
    """
    if isinstance(exc, OperationalError) or (
        isinstance(exc, DBAPIError) and exc.connection_invalidated
    ):
        return True
    return _extract_sqlstate(exc) in _RETRYABLE_SQLSTATES
|
|
395
|
+
|
|
396
|
+
|
|
397
|
+
class PostgresEventStore(EventStore):
    """Postgres implementation of EventStore interface."""

    def __init__(self, engine: AsyncEngine) -> None:
        """Initialize with shared engine from StateStore.

        The engine's lifecycle is owned by the StateStore; this class never
        disposes it.
        """
        self._engine = engine

    async def append(self, event: ClipLifecycleEvent) -> None:
        """Append a single event.

        Raises on failure (after logging) — unlike get_events, which
        degrades gracefully.
        """
        try:
            async with self._engine.begin() as conn:
                table = cast(Any, ClipEvent.__table__)
                payload = {
                    "clip_id": event.clip_id,
                    "timestamp": event.timestamp,
                    "event_type": event.event_type,
                    # id and event_type live in their own columns, so they are
                    # excluded from the JSONB payload to avoid duplication.
                    "event_data": event.model_dump(
                        mode="json",
                        exclude={"id", "event_type"},
                    ),
                }
                await conn.execute(pg_insert(table), [payload])
        except Exception as e:
            logger.error("Failed to append event: %s", e, exc_info=e)
            raise

    async def get_events(
        self,
        clip_id: str,
        after_id: int | None = None,
    ) -> list[ClipLifecycleEvent]:
        """Get all events for a clip, optionally after an event id.

        Returns events ordered by id ascending. Graceful degradation: any
        failure is logged and an empty list is returned. Rows with an
        event_type missing from _EVENT_TYPE_MAP are skipped with a warning.
        """
        try:
            query = (
                select(ClipEvent.id, ClipEvent.event_type, ClipEvent.event_data)
                .where(ClipEvent.clip_id == clip_id)
            )
            if after_id is not None:
                # Keyset-style continuation on the surrogate key.
                query = query.where(ClipEvent.id > after_id)
            query = query.order_by(ClipEvent.id)

            async with self._engine.connect() as conn:
                result = await conn.execute(query)
                rows = result.all()

            events: list[ClipLifecycleEvent] = []
            for event_id, event_type, event_data in rows:
                event_dict = _parse_jsonb_payload(event_data)
                # Re-inject the column-stored fields stripped out by append().
                event_dict.setdefault("event_type", event_type)
                event_dict["id"] = event_id
                event_cls = _EVENT_TYPE_MAP.get(event_type)
                if event_cls is None:
                    logger.warning("Unknown event type: %s", event_type)
                    continue
                event = event_cls.model_validate(event_dict)
                events.append(cast(ClipLifecycleEvent, event))

            return events
        except Exception as e:
            logger.error("Failed to get events for %s: %s", clip_id, e, exc_info=e)
            return []
|
|
458
|
+
|
|
459
|
+
|
|
460
|
+
class NoopEventStore(EventStore):
    """Fallback event store that silently discards events.

    Installed when Postgres is unavailable so the pipeline keeps running
    without an audit log.
    """

    async def append(self, event: ClipLifecycleEvent) -> None:
        """Discard the event."""
        del event

    async def get_events(
        self,
        clip_id: str,
        after_id: int | None = None,
    ) -> list[ClipLifecycleEvent]:
        """No events are ever stored, so the history is always empty."""
        del clip_id, after_id
        return []
|
|
472
|
+
|
|
473
|
+
|
|
474
|
+
class NoopStateStore(StateStore):
    """Fallback state store: writes are dropped and reads return nothing."""

    async def upsert(self, clip_id: str, data: ClipStateData) -> None:
        """Drop the write."""
        del clip_id, data

    async def get(self, clip_id: str) -> ClipStateData | None:
        """Nothing is ever stored, so lookups always miss."""
        del clip_id
        return None

    async def list_candidate_clips_for_cleanup(
        self,
        *,
        older_than_days: int | None,
        camera_name: str | None,
        batch_size: int,
        cursor: tuple[datetime, str] | None = None,
    ) -> list[tuple[str, ClipStateData, datetime]]:
        """Nothing to clean up; always an empty page."""
        del older_than_days, camera_name, batch_size, cursor
        return []

    async def shutdown(self, timeout: float | None = None) -> None:
        """No resources to release."""
        del timeout

    async def ping(self) -> bool:
        """Report unhealthy so health checks reflect the missing database."""
        return False
|
homesec/storage_paths.py
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
"""Helpers for building storage destination paths."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import PurePosixPath
|
|
6
|
+
|
|
7
|
+
from homesec.models.clip import Clip
|
|
8
|
+
from homesec.models.config import StoragePathsConfig
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def _sanitize_segment(value: str) -> str:
|
|
12
|
+
cleaned = value.strip().replace("/", "_").replace("\\", "_")
|
|
13
|
+
cleaned = "_".join(part for part in cleaned.split() if part)
|
|
14
|
+
return cleaned or "unknown"
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def _normalize_dest_path(path: PurePosixPath) -> str:
|
|
18
|
+
if path.is_absolute():
|
|
19
|
+
raise ValueError(f"dest_path must be relative, got {path}")
|
|
20
|
+
for part in path.parts:
|
|
21
|
+
if part in ("", ".", ".."):
|
|
22
|
+
raise ValueError(f"dest_path contains invalid segment: {path}")
|
|
23
|
+
return str(path)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def build_clip_path(clip: Clip, paths_cfg: StoragePathsConfig) -> str:
    """Build the destination path for a clip using configured defaults.

    Layout is ``<clips_dir>/<camera>/<filename>`` with each segment
    sanitized. When the local file has no name, ``<clip_id><suffix>``
    (suffix defaulting to ".mp4") is used instead.
    """
    fallback_name = f"{clip.clip_id}{clip.local_path.suffix or '.mp4'}"
    filename = _sanitize_segment(clip.local_path.name or fallback_name)
    camera_dir = _sanitize_segment(clip.camera_name)
    dest = PurePosixPath(paths_cfg.clips_dir) / camera_dir / filename
    return _normalize_dest_path(dest)
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def build_backup_path(name: str, paths_cfg: StoragePathsConfig) -> str:
    """Build the destination path for a backup file under backups_dir."""
    dest = PurePosixPath(paths_cfg.backups_dir) / _sanitize_segment(name)
    return _normalize_dest_path(dest)
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def build_artifact_path(name: str, paths_cfg: StoragePathsConfig) -> str:
    """Build the destination path for an artifact file under artifacts_dir."""
    dest = PurePosixPath(paths_cfg.artifacts_dir) / _sanitize_segment(name)
    return _normalize_dest_path(dest)
|
|
File without changes
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from sqlalchemy import BigInteger, Column, DateTime, Index, MetaData, Table, func
|
|
4
|
+
from sqlalchemy.dialects.postgresql import JSONB
|
|
5
|
+
|
|
6
|
+
# Dedicated metadata for the telemetry schema, kept separate from the
# application ORM models so the logs table can be created/managed independently.
metadata = MetaData()

# Append-only application log sink (written by the DB log handler).
logs = Table(
    "logs",
    metadata,
    # Monotonic surrogate key.
    Column("id", BigInteger, primary_key=True, autoincrement=True),
    # Insert time, assigned server-side.
    Column("ts", DateTime(timezone=True), server_default=func.now(), nullable=False),
    # Structured log record as JSONB.
    Column("payload", JSONB, nullable=False),
)

# Descending index on ts to serve "most recent logs first" queries.
Index("logs_ts_idx", logs.c.ts.desc())
|