homesec-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. homesec/__init__.py +20 -0
  2. homesec/app.py +393 -0
  3. homesec/cli.py +159 -0
  4. homesec/config/__init__.py +18 -0
  5. homesec/config/loader.py +109 -0
  6. homesec/config/validation.py +82 -0
  7. homesec/errors.py +71 -0
  8. homesec/health/__init__.py +5 -0
  9. homesec/health/server.py +226 -0
  10. homesec/interfaces.py +249 -0
  11. homesec/logging_setup.py +176 -0
  12. homesec/maintenance/__init__.py +1 -0
  13. homesec/maintenance/cleanup_clips.py +632 -0
  14. homesec/models/__init__.py +79 -0
  15. homesec/models/alert.py +32 -0
  16. homesec/models/clip.py +71 -0
  17. homesec/models/config.py +362 -0
  18. homesec/models/events.py +184 -0
  19. homesec/models/filter.py +62 -0
  20. homesec/models/source.py +77 -0
  21. homesec/models/storage.py +12 -0
  22. homesec/models/vlm.py +99 -0
  23. homesec/pipeline/__init__.py +6 -0
  24. homesec/pipeline/alert_policy.py +5 -0
  25. homesec/pipeline/core.py +639 -0
  26. homesec/plugins/__init__.py +62 -0
  27. homesec/plugins/alert_policies/__init__.py +80 -0
  28. homesec/plugins/alert_policies/default.py +111 -0
  29. homesec/plugins/alert_policies/noop.py +60 -0
  30. homesec/plugins/analyzers/__init__.py +126 -0
  31. homesec/plugins/analyzers/openai.py +446 -0
  32. homesec/plugins/filters/__init__.py +124 -0
  33. homesec/plugins/filters/yolo.py +317 -0
  34. homesec/plugins/notifiers/__init__.py +80 -0
  35. homesec/plugins/notifiers/mqtt.py +189 -0
  36. homesec/plugins/notifiers/multiplex.py +106 -0
  37. homesec/plugins/notifiers/sendgrid_email.py +228 -0
  38. homesec/plugins/storage/__init__.py +116 -0
  39. homesec/plugins/storage/dropbox.py +272 -0
  40. homesec/plugins/storage/local.py +108 -0
  41. homesec/plugins/utils.py +63 -0
  42. homesec/py.typed +0 -0
  43. homesec/repository/__init__.py +5 -0
  44. homesec/repository/clip_repository.py +552 -0
  45. homesec/sources/__init__.py +17 -0
  46. homesec/sources/base.py +224 -0
  47. homesec/sources/ftp.py +209 -0
  48. homesec/sources/local_folder.py +238 -0
  49. homesec/sources/rtsp.py +1251 -0
  50. homesec/state/__init__.py +10 -0
  51. homesec/state/postgres.py +501 -0
  52. homesec/storage_paths.py +46 -0
  53. homesec/telemetry/__init__.py +0 -0
  54. homesec/telemetry/db/__init__.py +1 -0
  55. homesec/telemetry/db/log_table.py +16 -0
  56. homesec/telemetry/db_log_handler.py +246 -0
  57. homesec/telemetry/postgres_settings.py +42 -0
  58. homesec-0.1.0.dist-info/METADATA +446 -0
  59. homesec-0.1.0.dist-info/RECORD +62 -0
  60. homesec-0.1.0.dist-info/WHEEL +4 -0
  61. homesec-0.1.0.dist-info/entry_points.txt +2 -0
  62. homesec-0.1.0.dist-info/licenses/LICENSE +201 -0
homesec/telemetry/db_log_handler.py
@@ -0,0 +1,246 @@
+ from __future__ import annotations
+
+ import asyncio
+ import json
+ import logging
+ import queue
+ import sys
+ import threading
+ import time
+ import traceback
+ from dataclasses import dataclass
+ from datetime import datetime, timezone
+ from typing import Any
+
+ from sqlalchemy import insert
+ from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
+
+ from homesec.telemetry.db.log_table import logs
+ from homesec.telemetry.db.log_table import metadata as db_metadata
+ from homesec.telemetry.postgres_settings import PostgresConfig
+
+
+ # Built-in LogRecord attributes; anything not listed here (and not one of the
+ # dedicated routing keys below) is treated as user-supplied context and copied
+ # into the payload's "fields" dict.
+ _STANDARD_LOGRECORD_ATTRS = {
+     "name",
+     "msg",
+     "args",
+     "levelname",
+     "levelno",
+     "pathname",
+     "filename",
+     "module",
+     "exc_info",
+     "exc_text",
+     "stack_info",
+     "lineno",
+     "funcName",
+     "created",
+     "msecs",
+     "relativeCreated",
+     "thread",
+     "threadName",
+     "processName",
+     "process",
+     "taskName",
+ }
+
+
+ def _utc_iso(ts: float) -> str:
+     return datetime.fromtimestamp(ts, tz=timezone.utc).isoformat().replace("+00:00", "Z")
+
+
+ def _record_to_payload(record: logging.LogRecord) -> dict[str, Any]:
+     camera_name = getattr(record, "camera_name", None) or None
+     recording_id = getattr(record, "recording_id", None)
+     if recording_id in ("", "-"):
+         recording_id = None
+
+     event_type = getattr(record, "event_type", None)
+     kind = getattr(record, "kind", None) or ("event" if event_type else "log")
+
+     msg_obj: Any
+     if isinstance(record.msg, str):
+         msg_obj = record.getMessage()
+     else:
+         msg_obj = record.msg
+
+     # Keep only JSON-serializable extras; fall back to str() for anything else.
+     fields: dict[str, Any] = {}
+     for k, v in record.__dict__.items():
+         if k in _STANDARD_LOGRECORD_ATTRS:
+             continue
+         if k in {"camera_name", "recording_id", "event_type", "kind"}:
+             continue
+         try:
+             json.dumps(v, default=str)
+             fields[k] = v
+         except Exception:
+             fields[k] = str(v)
+
+     payload: dict[str, Any] = {
+         "ts": _utc_iso(record.created),
+         "created": record.created,
+         "level": record.levelname,
+         "logger": record.name,
+         "module": record.module,
+         "lineno": record.lineno,
+         "pathname": record.pathname,
+         "camera_name": camera_name,
+         "recording_id": recording_id,
+         "kind": kind,
+         "event_type": event_type,
+         "message": msg_obj,
+         "fields": fields,
+     }
+
+     if record.exc_info:
+         payload["exception"] = "".join(traceback.format_exception(*record.exc_info))
+     elif record.exc_text:
+         payload["exception"] = record.exc_text
+
+     return payload
+
+
+ @dataclass(frozen=True)
+ class _DbRow:
+     created_ts: float
+     payload: dict[str, Any]
+
+
+ class AsyncPostgresJsonLogHandler(logging.Handler):
+     """Best-effort DB log handler using async SQLAlchemy in a worker thread.
+
+     - `emit()` must never block the caller.
+     - When the DB is down or the queue is full, logs are dropped (with a stderr note).
+     """
+
+     def __init__(self, config: PostgresConfig) -> None:
+         super().__init__()
+         self.config = config
+         self._queue: queue.Queue[_DbRow] = queue.Queue(maxsize=int(config.db_log_queue_size))
+         self._stop = threading.Event()
+         self._thread = threading.Thread(target=self._run_worker, name="db-log-writer", daemon=True)
+         self._started = False
+         self._drop_count = 0
+         self._schema_ensured = False
+
+         self.setLevel(getattr(logging, config.db_log_level, logging.INFO))
+
+     def start(self) -> None:
+         if self._started:
+             return
+         self._started = True
+         self._thread.start()
+
+     def close(self) -> None:
+         try:
+             self._stop.set()
+         finally:
+             super().close()
+
+     def emit(self, record: logging.LogRecord) -> None:
+         if not self.config.enabled or not self.config.db_dsn:
+             return
+         if not self._started:
+             self.start()
+
+         try:
+             payload = _record_to_payload(record)
+             row = _DbRow(
+                 created_ts=float(record.created),
+                 payload=payload,
+             )
+         except Exception as exc:
+             sys.stderr.write(f"[db-log] failed to serialize record: {exc}\n")
+             return
+
+         try:
+             self._queue.put_nowait(row)
+         except queue.Full:
+             if self.config.db_log_drop_policy == "drop_oldest":
+                 # Make room by discarding the oldest queued row, then retry once.
+                 try:
+                     _ = self._queue.get_nowait()
+                 except queue.Empty:
+                     pass
+                 try:
+                     self._queue.put_nowait(row)
+                     return
+                 except queue.Full:
+                     pass
+             self._drop_count += 1
+             if self._drop_count % 100 == 1:
+                 sys.stderr.write(f"[db-log] queue full; dropping logs (dropped={self._drop_count})\n")
+
+     def _drain_batch(self) -> list[_DbRow]:
+         batch: list[_DbRow] = []
+         deadline = time.monotonic() + float(self.config.db_log_flush_s)
+         while len(batch) < int(self.config.db_log_batch_size):
+             # Wait up to the remaining flush window for each row.
+             timeout = max(0.0, deadline - time.monotonic())
+             try:
+                 row = self._queue.get(timeout=timeout)
+             except queue.Empty:
+                 break
+             batch.append(row)
+         return batch
+
+     def _run_worker(self) -> None:
+         if not self.config.db_dsn:
+             return
+
+         backoff = float(self.config.db_log_backoff_initial_s)
+         backoff_max = float(self.config.db_log_backoff_max_s)
+
+         # The worker owns a private event loop so async SQLAlchemy can run
+         # without touching the caller's threads.
+         engine = create_async_engine(self.config.db_dsn, pool_pre_ping=True)
+         loop = asyncio.new_event_loop()
+         asyncio.set_event_loop(loop)
+
+         try:
+             try:
+                 loop.run_until_complete(self._ensure_schema(engine))
+                 self._schema_ensured = True
+             except Exception as exc:
+                 sys.stderr.write(f"[db-log] failed ensuring schema (will retry on flush): {exc}\n")
+
+             while True:
+                 if self._stop.is_set() and self._queue.empty():
+                     break
+
+                 batch = self._drain_batch()
+                 if not batch:
+                     continue
+
+                 try:
+                     loop.run_until_complete(self._flush(engine, batch))
+                     backoff = float(self.config.db_log_backoff_initial_s)
+                 except Exception as exc:
+                     # Best-effort: a failed batch is dropped; only back off
+                     # before the next drain.
+                     sys.stderr.write(f"[db-log] flush failed: {exc}; backing off {backoff:.1f}s\n")
+                     time.sleep(backoff)
+                     backoff = min(backoff * 2.0, backoff_max)
+         finally:
+             try:
+                 loop.run_until_complete(engine.dispose())
+             except Exception:
+                 pass
+             try:
+                 loop.close()
+             except Exception:
+                 pass
+
+     async def _flush(self, engine: AsyncEngine, batch: list[_DbRow]) -> None:
+         if not self._schema_ensured:
+             await self._ensure_schema(engine)
+             self._schema_ensured = True
+
+         rows = []
+         for row in batch:
+             rows.append(
+                 {
+                     "ts": datetime.fromtimestamp(row.created_ts, tz=timezone.utc),
+                     "payload": row.payload,
+                 }
+             )
+         async with engine.begin() as conn:
+             await conn.execute(insert(logs), rows)
+
+     async def _ensure_schema(self, engine: AsyncEngine) -> None:
+         async with engine.begin() as conn:
+             await conn.run_sync(db_metadata.create_all)
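
A minimal wiring sketch for the handler above (not part of the package; the DSN and the `extra` values are placeholders). Routing keys passed via `extra` land in dedicated payload columns, while any other extras end up in the JSON "fields" dict:

import logging

from homesec.telemetry.db_log_handler import AsyncPostgresJsonLogHandler
from homesec.telemetry.postgres_settings import PostgresConfig

# Placeholder DSN; in practice this would come from the environment
# (see PostgresConfig below).
config = PostgresConfig(db_dsn="postgresql+asyncpg://user:pass@localhost:5432/homesec")
handler = AsyncPostgresJsonLogHandler(config)
handler.start()  # optional: emit() also starts the worker lazily
logging.getLogger().addHandler(handler)

log = logging.getLogger("homesec.pipeline")
# camera_name/recording_id/event_type are the dedicated routing keys;
# duration_s is arbitrary context and goes into payload["fields"].
log.info(
    "clip uploaded",
    extra={"camera_name": "front_door", "recording_id": "rec-123", "duration_s": 14.2},
)

Note that when `event_type` is supplied and `kind` is not, the record's `kind` defaults to "event" rather than "log".
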
homesec/telemetry/postgres_settings.py
@@ -0,0 +1,42 @@
+ from __future__ import annotations
+
+ from pathlib import Path
+ from typing import Literal
+
+ from pydantic import field_validator
+ from pydantic_settings import BaseSettings, SettingsConfigDict
+
+ _REPO_DOTENV = Path(__file__).resolve().parents[3] / ".env"
+
+
+ class PostgresConfig(BaseSettings):
+     model_config = SettingsConfigDict(
+         env_file=(".env", _REPO_DOTENV),
+         env_file_encoding="utf-8",
+         case_sensitive=False,
+         extra="ignore",
+     )
+
+     db_dsn: str | None = None  # postgresql+asyncpg://user:pass@host:5432/db
+     db_log_level: str = "INFO"
+     db_log_queue_size: int = 5000
+     db_log_batch_size: int = 100
+     db_log_flush_s: float = 1.0
+     db_log_backoff_initial_s: float = 1.0
+     db_log_backoff_max_s: float = 30.0
+     db_log_drop_policy: Literal["drop_new", "drop_oldest"] = "drop_new"
+
+     @field_validator("db_log_level")
+     @classmethod
+     def _normalize_level(cls, value: str) -> str:
+         return str(value).upper()
+
+     @property
+     def enabled(self) -> bool:
+         return bool(self.db_dsn)
+
+     @property
+     def sync_dsn(self) -> str | None:
+         if not self.db_dsn:
+             return None
+         return self.db_dsn.replace("+asyncpg", "")
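
Because `PostgresConfig` is a pydantic-settings model with no env prefix, each field maps to an environment variable of the same name (case-insensitive), read from the process environment or one of the listed `.env` files. A hedged sketch of that behavior (all values are placeholders):

import os

from homesec.telemetry.postgres_settings import PostgresConfig

os.environ["DB_DSN"] = "postgresql+asyncpg://user:pass@localhost:5432/homesec"
os.environ["DB_LOG_LEVEL"] = "debug"             # the validator normalizes this to "DEBUG"
os.environ["DB_LOG_DROP_POLICY"] = "drop_oldest"

config = PostgresConfig()
assert config.enabled                            # True once db_dsn is set
assert config.db_log_level == "DEBUG"
# sync_dsn strips the "+asyncpg" driver marker for synchronous tooling:
assert config.sync_dsn == "postgresql://user:pass@localhost:5432/homesec"
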