homesec 1.1.1-py3-none-any.whl → 1.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. homesec/__init__.py +1 -1
  2. homesec/app.py +38 -84
  3. homesec/cli.py +6 -10
  4. homesec/config/validation.py +38 -12
  5. homesec/interfaces.py +50 -2
  6. homesec/maintenance/cleanup_clips.py +4 -4
  7. homesec/models/__init__.py +6 -5
  8. homesec/models/alert.py +3 -2
  9. homesec/models/clip.py +4 -2
  10. homesec/models/config.py +62 -17
  11. homesec/models/enums.py +114 -0
  12. homesec/models/events.py +19 -18
  13. homesec/models/filter.py +13 -3
  14. homesec/models/source.py +4 -0
  15. homesec/models/vlm.py +18 -7
  16. homesec/plugins/__init__.py +7 -33
  17. homesec/plugins/alert_policies/__init__.py +34 -59
  18. homesec/plugins/alert_policies/default.py +20 -45
  19. homesec/plugins/alert_policies/noop.py +14 -29
  20. homesec/plugins/analyzers/__init__.py +20 -105
  21. homesec/plugins/analyzers/openai.py +70 -53
  22. homesec/plugins/filters/__init__.py +18 -102
  23. homesec/plugins/filters/yolo.py +103 -66
  24. homesec/plugins/notifiers/__init__.py +20 -56
  25. homesec/plugins/notifiers/mqtt.py +22 -30
  26. homesec/plugins/notifiers/sendgrid_email.py +34 -32
  27. homesec/plugins/registry.py +160 -0
  28. homesec/plugins/sources/__init__.py +45 -0
  29. homesec/plugins/sources/ftp.py +25 -0
  30. homesec/plugins/sources/local_folder.py +30 -0
  31. homesec/plugins/sources/rtsp.py +27 -0
  32. homesec/plugins/storage/__init__.py +18 -88
  33. homesec/plugins/storage/dropbox.py +36 -37
  34. homesec/plugins/storage/local.py +8 -29
  35. homesec/plugins/utils.py +8 -4
  36. homesec/repository/clip_repository.py +20 -14
  37. homesec/sources/base.py +24 -2
  38. homesec/sources/local_folder.py +57 -78
  39. homesec/sources/rtsp.py +45 -4
  40. homesec/state/postgres.py +46 -17
  41. {homesec-1.1.1.dist-info → homesec-1.2.0.dist-info}/METADATA +1 -1
  42. homesec-1.2.0.dist-info/RECORD +68 -0
  43. homesec-1.1.1.dist-info/RECORD +0 -62
  44. {homesec-1.1.1.dist-info → homesec-1.2.0.dist-info}/WHEEL +0 -0
  45. {homesec-1.1.1.dist-info → homesec-1.2.0.dist-info}/entry_points.txt +0 -0
  46. {homesec-1.1.1.dist-info → homesec-1.2.0.dist-info}/licenses/LICENSE +0 -0
homesec/plugins/alert_policies/__init__.py
@@ -3,77 +3,52 @@
 from __future__ import annotations
 
 import logging
-from collections.abc import Callable
-from dataclasses import dataclass
-from typing import TypeVar
-
-from pydantic import BaseModel
+from typing import Any, cast
 
 from homesec.interfaces import AlertPolicy
 from homesec.models.config import AlertPolicyOverrides
+from homesec.plugins.alert_policies.noop import NoopAlertPolicySettings
+from homesec.plugins.registry import PluginType, load_plugin
 
 logger = logging.getLogger(__name__)
 
 
-AlertPolicyFactory = Callable[[BaseModel, dict[str, AlertPolicyOverrides], list[str]], AlertPolicy]
-
-
-@dataclass(frozen=True)
-class AlertPolicyPlugin:
-    name: str
-    config_model: type[BaseModel]
-    factory: AlertPolicyFactory
-
-
-ALERT_POLICY_REGISTRY: dict[str, AlertPolicyPlugin] = {}
-
-
-def register_alert_policy(plugin: AlertPolicyPlugin) -> None:
-    """Register an alert policy plugin with collision detection.
+def load_alert_policy(
+    config: Any,  # AlertPolicyConfig but trying to avoid circular import if possible
+    per_camera_overrides: dict[str, AlertPolicyOverrides],
+    trigger_classes: list[str],
+) -> AlertPolicy:
+    """Load and instantiate an alert policy plugin.
 
     Args:
-        plugin: Alert policy plugin to register
-
-    Raises:
-        ValueError: If a plugin with the same name is already registered
-    """
-    if plugin.name in ALERT_POLICY_REGISTRY:
-        raise ValueError(
-            f"Alert policy plugin '{plugin.name}' is already registered. "
-            f"Plugin names must be unique across all alert policy plugins."
-        )
-    ALERT_POLICY_REGISTRY[plugin.name] = plugin
-
-
-T = TypeVar("T", bound=Callable[[], AlertPolicyPlugin])
-
-
-def alert_policy_plugin(name: str) -> Callable[[T], T]:
-    """Decorator to register an alert policy plugin.
-
-    Usage:
-        @alert_policy_plugin(name="my_policy")
-        def my_policy_plugin() -> AlertPolicyPlugin:
-            return AlertPolicyPlugin(...)
-
-    Args:
-        name: Plugin name (for validation only - must match plugin.name)
+        config: Alert policy configuration (AlertPolicyConfig)
+        per_camera_overrides: Map of camera name to override settings
+        trigger_classes: List of object classes that trigger analysis
 
     Returns:
-        Decorator function that registers the plugin
+        Configured AlertPolicy instance
     """
+    # Handle disabled -> noop fallback
+    if not config.enabled:
+        return cast(
+            AlertPolicy,
+            load_plugin(
+                PluginType.ALERT_POLICY,
+                "noop",
+                NoopAlertPolicySettings(),
+            ),
+        )
 
-    def decorator(factory_fn: T) -> T:
-        plugin = factory_fn()
-        register_alert_policy(plugin)
-        return factory_fn
-
-    return decorator
+    return cast(
+        AlertPolicy,
+        load_plugin(
+            PluginType.ALERT_POLICY,
+            config.backend,
+            config.config,
+            overrides=per_camera_overrides,
+            trigger_classes=trigger_classes,
+        ),
+    )
 
 
-__all__ = [
-    "AlertPolicyPlugin",
-    "ALERT_POLICY_REGISTRY",
-    "register_alert_policy",
-    "alert_policy_plugin",
-]
+__all__ = ["load_alert_policy"]
homesec/plugins/alert_policies/default.py
@@ -4,26 +4,27 @@ from __future__ import annotations
 
 from homesec.interfaces import AlertPolicy
 from homesec.models.alert import AlertDecision
-from homesec.models.config import AlertPolicyOverrides, DefaultAlertPolicySettings
+from homesec.models.config import DefaultAlertPolicySettings
+from homesec.models.enums import RiskLevel
 from homesec.models.filter import FilterResult
 from homesec.models.vlm import AnalysisResult
-
-# Risk level ordering for comparison
-RISK_LEVELS = {"low": 0, "medium": 1, "high": 2, "critical": 3}
+from homesec.plugins.registry import PluginType, plugin
 
 
+@plugin(plugin_type=PluginType.ALERT_POLICY, name="default")
 class DefaultAlertPolicy(AlertPolicy):
     """Default alert policy implementation."""
 
-    def __init__(
-        self,
-        settings: DefaultAlertPolicySettings,
-        overrides: dict[str, AlertPolicyOverrides],
-        trigger_classes: list[str],
-    ) -> None:
+    config_cls = DefaultAlertPolicySettings
+
+    @classmethod
+    def create(cls, config: DefaultAlertPolicySettings) -> AlertPolicy:
+        return cls(config)
+
+    def __init__(self, settings: DefaultAlertPolicySettings) -> None:
         self._settings = settings
-        self._overrides = overrides
-        self._trigger_classes = list(trigger_classes)
+        self._overrides = settings.overrides
+        self._trigger_classes = list(settings.trigger_classes)
 
     def should_notify(
         self,
@@ -72,41 +73,15 @@ class DefaultAlertPolicy(AlertPolicy):
         }
         return DefaultAlertPolicySettings.model_validate(merged)
 
-    def _risk_meets_threshold(self, actual: str, threshold: str) -> bool:
-        return RISK_LEVELS.get(actual, 0) >= RISK_LEVELS.get(threshold, 0)
+    def _risk_meets_threshold(self, actual: RiskLevel, threshold: RiskLevel) -> bool:
+        """Check if actual risk level meets or exceeds threshold.
+
+        Uses IntEnum comparison for natural ordering:
+            RiskLevel.HIGH >= RiskLevel.MEDIUM  # True
+        """
+        return actual >= threshold
 
     def _filter_detected_trigger_classes(self, filter_result: FilterResult) -> bool:
         detected = set(filter_result.detected_classes)
         trigger = set(self._trigger_classes)
         return bool(detected & trigger)
-
-
-# Plugin registration
-from pydantic import BaseModel
-
-from homesec.interfaces import AlertPolicy
-from homesec.plugins.alert_policies import AlertPolicyPlugin, alert_policy_plugin
-
-
-@alert_policy_plugin(name="default")
-def default_alert_policy_plugin() -> AlertPolicyPlugin:
-    """Default alert policy plugin factory.
-
-    Returns:
-        AlertPolicyPlugin for default risk-based alert policy
-    """
-    from homesec.models.config import DefaultAlertPolicySettings
-
-    def factory(
-        cfg: BaseModel,
-        overrides: dict[str, AlertPolicyOverrides],
-        trigger_classes: list[str],
-    ) -> AlertPolicy:
-        settings = DefaultAlertPolicySettings.model_validate(cfg)
-        return DefaultAlertPolicy(settings, overrides, trigger_classes)
-
-    return AlertPolicyPlugin(
-        name="default",
-        config_model=DefaultAlertPolicySettings,
-        factory=factory,
-    )
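
Note: the removed RISK_LEVELS mapping is replaced by RiskLevel from the new homesec/models/enums.py (+114 lines), whose hunk is not shown in this section. Going by the old mapping and the docstring above, it is presumably an IntEnum roughly like the sketch below; the member names and values are inferred, not copied from the release.

# Hypothetical sketch of RiskLevel in homesec/models/enums.py,
# inferred from the removed RISK_LEVELS dict and the new docstring.
from enum import IntEnum


class RiskLevel(IntEnum):
    LOW = 0
    MEDIUM = 1
    HIGH = 2
    CRITICAL = 3


# IntEnum ordering makes the threshold check a plain comparison:
assert RiskLevel.HIGH >= RiskLevel.MEDIUM
assert not (RiskLevel.LOW >= RiskLevel.CRITICAL)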
homesec/plugins/alert_policies/noop.py
@@ -6,14 +6,27 @@ from pydantic import BaseModel
 
 from homesec.interfaces import AlertPolicy
 from homesec.models.alert import AlertDecision
-from homesec.models.config import AlertPolicyOverrides
 from homesec.models.filter import FilterResult
 from homesec.models.vlm import AnalysisResult
+from homesec.plugins.registry import PluginType, plugin
 
 
+class NoopAlertPolicySettings(BaseModel):
+    """Settings for no-op alert policy (empty - no configuration needed)."""
+
+    model_config = {"extra": "forbid"}
+
+
+@plugin(plugin_type=PluginType.ALERT_POLICY, name="noop")
 class NoopAlertPolicy(AlertPolicy):
     """Alert policy that never notifies."""
 
+    config_cls = NoopAlertPolicySettings
+
+    @classmethod
+    def create(cls, config: NoopAlertPolicySettings) -> AlertPolicy:
+        return cls()
+
     def should_notify(
         self,
         camera_name: str,
@@ -29,31 +42,3 @@ class NoopAlertPolicy(AlertPolicy):
         analysis: AnalysisResult | None,
     ) -> AlertDecision:
         return AlertDecision(notify=False, notify_reason="alert_policy_disabled")
-
-
-# Plugin registration
-from homesec.interfaces import AlertPolicy
-from homesec.plugins.alert_policies import AlertPolicyPlugin, alert_policy_plugin
-
-
-@alert_policy_plugin(name="noop")
-def noop_alert_policy_plugin() -> AlertPolicyPlugin:
-    """Noop alert policy plugin that never sends alerts.
-
-    Returns:
-        AlertPolicyPlugin for no-op alert policy
-    """
-
-    def factory(
-        cfg: BaseModel,
-        overrides: dict[str, AlertPolicyOverrides],
-        trigger_classes: list[str],
-    ) -> AlertPolicy:
-        # NoopAlertPolicy doesn't use any config
-        return NoopAlertPolicy()
-
-    return AlertPolicyPlugin(
-        name="noop",
-        config_model=BaseModel,  # No config needed
-        factory=factory,
-    )
homesec/plugins/analyzers/__init__.py
@@ -1,125 +1,40 @@
-"""VLM analyzer plugins and registry."""
+"""Analyzer plugins and registry."""
 
 from __future__ import annotations
 
 import logging
-from collections.abc import Callable
-from dataclasses import dataclass
-from typing import TYPE_CHECKING, TypeVar
-
-from pydantic import BaseModel
+from typing import cast
 
 from homesec.interfaces import VLMAnalyzer
-
-if TYPE_CHECKING:
-    from homesec.models.vlm import VLMConfig
+from homesec.models.vlm import VLMConfig
+from homesec.plugins.registry import PluginType, load_plugin
 
 logger = logging.getLogger(__name__)
 
-# Type alias for clarity
-VLMFactory = Callable[["VLMConfig"], VLMAnalyzer]
-
-
-@dataclass(frozen=True)
-class VLMPlugin:
-    """Metadata for a VLM analyzer plugin."""
-
-    name: str
-    config_model: type[BaseModel]
-    factory: VLMFactory
-
-
-VLM_REGISTRY: dict[str, VLMPlugin] = {}
-
-
-def register_vlm(plugin: VLMPlugin) -> None:
-    """Register a VLM plugin with collision detection.
-
-    Args:
-        plugin: VLM plugin to register
-
-    Raises:
-        ValueError: If a plugin with the same name is already registered
-    """
-    if plugin.name in VLM_REGISTRY:
-        raise ValueError(
-            f"VLM plugin '{plugin.name}' is already registered. "
-            f"Plugin names must be unique across all VLM plugins."
-        )
-    VLM_REGISTRY[plugin.name] = plugin
-
-
-T = TypeVar("T", bound=Callable[[], VLMPlugin])
-
 
-def vlm_plugin(name: str) -> Callable[[T], T]:
-    """Decorator to register a VLM analyzer plugin.
-
-    Usage:
-        @vlm_plugin(name="my_vlm")
-        def my_vlm_plugin() -> VLMPlugin:
-            return VLMPlugin(...)
+def load_analyzer(config: VLMConfig) -> VLMAnalyzer:
+    """Load and instantiate a VLM analyzer plugin.
 
     Args:
-        name: Plugin name (for validation only - must match plugin.name)
+        config: VLM configuration
 
     Returns:
-        Decorator function that registers the plugin
-    """
-
-    def decorator(factory_fn: T) -> T:
-        plugin = factory_fn()
-        register_vlm(plugin)
-        return factory_fn
-
-    return decorator
-
-
-def load_vlm_plugin(config: VLMConfig) -> VLMAnalyzer:
-    """Load VLM plugin by name from config.
-
-    Validates the llm dict against the plugin's config_model and creates
-    a VLMConfig with the validated settings object.
-
-    Args:
-        config: VLM configuration with backend name and raw llm dict
-
-    Returns:
-        Instantiated VLM plugin
+        Configured VLMAnalyzer instance
 
     Raises:
-        ValueError: If plugin name is unknown or config validation fails
+        ValueError: If backend not found in registry
+        ValidationError: If config validation fails
     """
-    plugin_name = config.backend.lower()
-
-    if plugin_name not in VLM_REGISTRY:
-        available = ", ".join(sorted(VLM_REGISTRY.keys()))
-        raise ValueError(f"Unknown VLM plugin: '{plugin_name}'. Available: {available}")
-
-    plugin = VLM_REGISTRY[plugin_name]
-
-    # Validate config.llm dict against plugin's config_model
-    validated_llm_settings = plugin.config_model.model_validate(config.llm)
-
-    # Create new VLMConfig with validated llm settings object
-    from homesec.models.vlm import VLMConfig as VLMConfigModel
-
-    validated_config = VLMConfigModel(
-        backend=config.backend,
-        trigger_classes=config.trigger_classes,
-        max_workers=config.max_workers,
-        llm=validated_llm_settings,
-        preprocessing=config.preprocessing,
+    return cast(
+        VLMAnalyzer,
+        load_plugin(
+            PluginType.ANALYZER,
+            config.backend,
+            config.llm,
+            trigger_classes=config.trigger_classes,
+            max_workers=config.max_workers,
+        ),
     )
 
-    return plugin.factory(validated_config)
-
 
-__all__ = [
-    "VLMPlugin",
-    "VLMFactory",
-    "VLM_REGISTRY",
-    "register_vlm",
-    "vlm_plugin",
-    "load_vlm_plugin",
-]
+__all__ = ["load_analyzer"]
homesec/plugins/analyzers/openai.py
@@ -8,10 +8,25 @@ import json
 import logging
 import os
 from pathlib import Path
+from typing import Any
+
+aiohttp: Any
+cv2: Any
+Image: Any
+
+try:
+    import aiohttp as _aiohttp
+    import cv2 as _cv2
+    from PIL import Image as _Image
+except Exception:
+    aiohttp = None
+    cv2 = None
+    Image = None
+else:
+    aiohttp = _aiohttp
+    cv2 = _cv2
+    Image = _Image
 
-import aiohttp
-import cv2
-from PIL import Image
 from pydantic import BaseModel
 
 from homesec.interfaces import VLMAnalyzer
@@ -23,6 +38,7 @@ from homesec.models.vlm import (
     VLMConfig,
     VLMPreprocessConfig,
 )
+from homesec.plugins.registry import PluginType, plugin
 
 logger = logging.getLogger(__name__)
 
@@ -45,6 +61,15 @@ Focus on KEY EVENTS ONLY:
 Keep observations list concise (short bullet points of security-relevant actions)."""
 
 
+def _ensure_openai_dependencies() -> None:
+    """Fail fast with a clear error if OpenAI VLM dependencies are missing."""
+    if aiohttp is None or cv2 is None or Image is None:
+        raise RuntimeError(
+            "Missing dependency for OpenAI VLM. "
+            "Install with: uv pip install aiohttp opencv-python pillow"
+        )
+
+
 def _create_json_schema_format(
     schema_model: type[BaseModel], schema_name: str
 ) -> dict[str, object]:
@@ -59,6 +84,7 @@ def _create_json_schema_format(
     }
 
 
+@plugin(plugin_type=PluginType.ANALYZER, name="openai")
 class OpenAIVLM(VLMAnalyzer):
     """OpenAI-compatible VLM analyzer plugin.
 
@@ -66,49 +92,43 @@ class OpenAIVLM(VLMAnalyzer):
     Supports structured output with Pydantic schemas.
     """
 
-    def __init__(self, config: VLMConfig) -> None:
-        """Initialize OpenAI VLM with config validation.
-
-        Required config:
-            llm.api_key_env: Env var name with API key
-            llm.model: Model name (e.g., gpt-4o)
-
-        Optional config:
-            llm.base_url: API base URL (default: https://api.openai.com/v1)
-            llm.token_param: max_tokens or max_completion_tokens
-            llm.max_completion_tokens/max_tokens: Token limits
-            llm.temperature: Temperature (None to omit)
-            preprocessing.max_frames: Maximum frames to send (default: 10)
-            preprocessing.max_size: Max image dimension (default: 1024)
-            preprocessing.quality: JPEG quality (default: 85)
+    config_cls = OpenAILLMConfig
+
+    @classmethod
+    def create(cls, config: OpenAILLMConfig) -> VLMAnalyzer:
+        return cls(config)
+
+    def __init__(self, llm_config: OpenAILLMConfig) -> None:
+        """Initialize OpenAI VLM with validated LLM config.
+
+        Args:
+            llm_config: OpenAI-specific configuration (API key, model, etc.)
+                Also assumes injected runtime fields (trigger_classes, max_workers)
         """
-        if not isinstance(config.llm, OpenAILLMConfig):
-            raise ValueError("OpenAIVLM requires llm=OpenAILLMConfig")
-        llm = config.llm
-        preprocess = config.preprocessing
+        _ensure_openai_dependencies()
+        self._config = llm_config
 
         # Get API key from env
-        self._api_key_env = llm.api_key_env
+        self._api_key_env = llm_config.api_key_env
        self.api_key = os.getenv(self._api_key_env)
         if not self.api_key:
             raise ValueError(f"API key not found in env: {self._api_key_env}")
 
-        self.model = llm.model
-        self.base_url = llm.base_url
+        self.model = llm_config.model
+        self.base_url = llm_config.base_url
         self.system_prompt = DEFAULT_SYSTEM_PROMPT
-        self.temperature = llm.temperature
-        self.token_param = llm.token_param
-        self.max_tokens = self._resolve_token_limit(llm)
-        self.request_timeout = float(llm.request_timeout)
+        self.temperature = llm_config.temperature
+        self.token_param = llm_config.token_param
+        self.max_tokens = self._resolve_token_limit(llm_config)
+        self.request_timeout = float(llm_config.request_timeout)
 
         # Create HTTP session
         self._session: aiohttp.ClientSession | None = None
         self._shutdown_called = False
 
         logger.info(
-            "OpenAIVLM initialized: model=%s, max_frames=%d, token_param=%s, temperature=%s",
+            "OpenAIVLM initialized: model=%s, token_param=%s, temperature=%s",
            self.model,
-            preprocess.max_frames,
             self.token_param,
             self.temperature if self.temperature is not None else "default",
         )
@@ -116,6 +136,8 @@ class OpenAIVLM(VLMAnalyzer):
     async def _ensure_session(self) -> aiohttp.ClientSession:
         """Lazy-create aiohttp session with timeout."""
         if self._session is None:
+            if aiohttp is None:
+                raise RuntimeError("aiohttp dependency is required for OpenAI VLM")
             timeout = aiohttp.ClientTimeout(total=self.request_timeout)
             self._session = aiohttp.ClientSession(timeout=timeout)
         return self._session
@@ -256,7 +278,8 @@ class OpenAIVLM(VLMAnalyzer):
         async with session.post(url, json=payload, headers=headers) as resp:
             if resp.status != 200:
                 error_text = await resp.text()
-                raise RuntimeError(f"OpenAI API error {resp.status}: {error_text}")
+                logger.debug("OpenAI API error details: %s", error_text)
+                raise RuntimeError(f"OpenAI API error: HTTP {resp.status}")
 
             data = await resp.json()
             if not isinstance(data, dict):
@@ -330,6 +353,9 @@ class OpenAIVLM(VLMAnalyzer):
 
         Returns list of (base64 JPEG, timestamp) tuples.
         """
+        if cv2 is None or Image is None:
+            raise RuntimeError("OpenAI VLM dependencies are not available")
+
         cap = cv2.VideoCapture(str(video_path))
         total_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
 
@@ -413,25 +439,16 @@ class OpenAIVLM(VLMAnalyzer):
 
         logger.info("OpenAIVLM shutdown complete")
 
+    async def ping(self) -> bool:
+        """Health check - verify API is reachable.
 
-
-# Plugin registration
-from homesec.plugins.analyzers import VLMPlugin, vlm_plugin
-
-
-@vlm_plugin(name="openai")
-def openai_vlm_plugin() -> VLMPlugin:
-    """OpenAI VLM plugin factory.
-
-    Returns:
-        VLMPlugin for OpenAI vision-language model
-    """
-    from homesec.models.vlm import OpenAILLMConfig
-
-    def factory(cfg: VLMConfig) -> VLMAnalyzer:
-        return OpenAIVLM(cfg)
-
-    return VLMPlugin(
-        name="openai",
-        config_model=OpenAILLMConfig,
-        factory=factory,
-    )
+        Note: This checks if session is alive and not shut down.
+        A full API connectivity check would require an API call.
+        """
+        if self._shutdown_called:
+            return False
+        # Session being None is fine - it's lazy-created
+        # If session exists and is closed, that's a problem
+        if self._session is not None and self._session.closed:
+            return False
+        return True