matrice-inference 0.1.2.tar.gz → 0.1.23.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of matrice-inference might be problematic.
Files changed (51)
  1. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/PKG-INFO +1 -1
  2. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/matrice_inference.egg-info/PKG-INFO +1 -1
  3. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/matrice_inference.egg-info/SOURCES.txt +1 -0
  4. matrice_inference-0.1.23/src/matrice_inference/__init__.py +89 -0
  5. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/server/__init__.py +17 -11
  6. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/server/model/triton_server.py +1 -3
  7. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/server/server.py +46 -4
  8. matrice_inference-0.1.23/src/matrice_inference/server/stream/camera_config_monitor.py +221 -0
  9. matrice_inference-0.1.23/src/matrice_inference/server/stream/consumer_worker.py +550 -0
  10. matrice_inference-0.1.23/src/matrice_inference/server/stream/frame_cache.py +350 -0
  11. matrice_inference-0.1.23/src/matrice_inference/server/stream/inference_worker.py +261 -0
  12. matrice_inference-0.1.23/src/matrice_inference/server/stream/post_processing_worker.py +302 -0
  13. matrice_inference-0.1.23/src/matrice_inference/server/stream/producer_worker.py +204 -0
  14. matrice_inference-0.1.23/src/matrice_inference/server/stream/stream_pipeline.py +435 -0
  15. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/tmp/aggregator/analytics.py +1 -1
  16. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/tmp/overall_inference_testing.py +0 -4
  17. matrice_inference-0.1.2/src/matrice_inference/__init__.py +0 -72
  18. matrice_inference-0.1.2/src/matrice_inference/server/stream/consumer_worker.py +0 -201
  19. matrice_inference-0.1.2/src/matrice_inference/server/stream/frame_cache.py +0 -127
  20. matrice_inference-0.1.2/src/matrice_inference/server/stream/inference_worker.py +0 -163
  21. matrice_inference-0.1.2/src/matrice_inference/server/stream/post_processing_worker.py +0 -230
  22. matrice_inference-0.1.2/src/matrice_inference/server/stream/producer_worker.py +0 -147
  23. matrice_inference-0.1.2/src/matrice_inference/server/stream/stream_pipeline.py +0 -451
  24. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/LICENSE.txt +0 -0
  25. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/README.md +0 -0
  26. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/matrice_inference.egg-info/dependency_links.txt +0 -0
  27. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/matrice_inference.egg-info/not-zip-safe +0 -0
  28. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/matrice_inference.egg-info/top_level.txt +0 -0
  29. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/pyproject.toml +0 -0
  30. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/setup.cfg +0 -0
  31. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/setup.py +0 -0
  32. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/py.typed +0 -0
  33. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/server/inference_interface.py +0 -0
  34. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/server/model/__init__.py +0 -0
  35. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/server/model/model_manager.py +0 -0
  36. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/server/model/model_manager_wrapper.py +0 -0
  37. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/server/model/triton_model_manager.py +0 -0
  38. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/server/proxy_interface.py +0 -0
  39. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/server/stream/__init__.py +0 -0
  40. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/server/stream/app_deployment.py +0 -0
  41. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/server/stream/utils.py +0 -0
  42. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/tmp/abstract_model_manager.py +0 -0
  43. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/tmp/aggregator/__init__.py +0 -0
  44. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/tmp/aggregator/aggregator.py +0 -0
  45. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/tmp/aggregator/ingestor.py +0 -0
  46. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/tmp/aggregator/latency.py +0 -0
  47. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/tmp/aggregator/pipeline.py +0 -0
  48. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/tmp/aggregator/publisher.py +0 -0
  49. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/tmp/aggregator/synchronizer.py +0 -0
  50. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/tmp/batch_manager.py +0 -0
  51. {matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/tmp/triton_utils.py +0 -0
{matrice_inference-0.1.2 → matrice_inference-0.1.23}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: matrice_inference
-Version: 0.1.2
+Version: 0.1.23
 Summary: Common server utilities for Matrice.ai services
 Author-email: "Matrice.ai" <dipendra@matrice.ai>
 License-Expression: MIT
{matrice_inference-0.1.2 → matrice_inference-0.1.23}/matrice_inference.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: matrice_inference
-Version: 0.1.2
+Version: 0.1.23
 Summary: Common server utilities for Matrice.ai services
 Author-email: "Matrice.ai" <dipendra@matrice.ai>
 License-Expression: MIT
{matrice_inference-0.1.2 → matrice_inference-0.1.23}/matrice_inference.egg-info/SOURCES.txt
@@ -20,6 +20,7 @@ src/matrice_inference/server/model/triton_model_manager.py
 src/matrice_inference/server/model/triton_server.py
 src/matrice_inference/server/stream/__init__.py
 src/matrice_inference/server/stream/app_deployment.py
+src/matrice_inference/server/stream/camera_config_monitor.py
 src/matrice_inference/server/stream/consumer_worker.py
 src/matrice_inference/server/stream/frame_cache.py
 src/matrice_inference/server/stream/inference_worker.py
matrice_inference-0.1.23/src/matrice_inference/__init__.py
@@ -0,0 +1,89 @@
+"""Module providing __init__ functionality."""
+
+import os
+import sys
+import platform
+from matrice_common.utils import dependencies_check
+
+base = [
+    "httpx",
+    "fastapi",
+    "uvicorn",
+    "pillow",
+    "confluent_kafka[snappy]",
+    "aiokafka",
+    "aiohttp",
+    "filterpy",
+    "scipy",
+    "scikit-learn",
+    "matplotlib",
+    "scikit-image",
+    "python-snappy",
+    "pyyaml",
+    "imagehash",
+    "Pillow",
+    "transformers"
+]
+
+# Helper to attempt installation and verify importability
+def _install_and_verify(pkg: str, import_name: str):
+    """Install a package expression and return True if the import succeeds."""
+    try:
+        if pkg == 'onnxruntime-gpu':
+            pkg = 'onnxruntime'
+        __import__(pkg)
+        return True
+    except:
+        if dependencies_check([pkg]):
+            try:
+                __import__(import_name)
+                return True
+            except ImportError:
+                return False
+        return False
+
+# Runtime gating for optional OCR bootstrap (default OFF), and never on Jetson
+_ENABLE_OCR_BOOTSTRAP = os.getenv("MATRICE_ENABLE_OCR_BOOTSTRAP", "0")
+_IS_JETSON = (platform.machine().lower() in ("aarch64", "arm64"))
+
+print("*******************************Deployment ENV Info**********************************")
+print(f"ENABLE_JETSON_PIP_SETTINGS: {_ENABLE_OCR_BOOTSTRAP}")  # 0 if OFF, 1 if ON; this will be set to 1 in the Jetson BYOM codebase
+print(f"IS_JETSON_ARCH?: {_IS_JETSON}")  # True if Jetson, False otherwise
+print("*************************************************************************************")
+
+if not int(_ENABLE_OCR_BOOTSTRAP) and not _IS_JETSON:
+    # Install base dependencies first
+    dependencies_check(base)
+
+    if not dependencies_check(["opencv-python"]):
+        dependencies_check(["opencv-python-headless"])
+
+    # Attempt GPU-specific dependencies first
+    _gpu_ok = _install_and_verify("onnxruntime-gpu", "onnxruntime") and _install_and_verify(
+        "fast-plate-ocr[onnx-gpu]", "fast_plate_ocr"
+    )
+
+    if not _gpu_ok:
+        # Fallback to CPU variants
+        _cpu_ok = _install_and_verify("onnxruntime", "onnxruntime") and _install_and_verify(
+            "fast-plate-ocr[onnx]", "fast_plate_ocr"
+        )
+        if not _cpu_ok:
+            # Last-chance fallback without extras tag (PyPI sometimes lacks them)
+            _install_and_verify("fast-plate-ocr", "fast_plate_ocr")
+
+# matrice_deps = ["matrice_common", "matrice_analytics", "matrice"]
+
+# dependencies_check(matrice_deps)
+sys.path.append(os.path.dirname(os.path.abspath(__file__)))
+from server.server import MatriceDeployServer  # noqa: E402
+from server.server import MatriceDeployServer as MatriceDeploy  # noqa: E402  # Keep this for backwards compatibility
+from server.inference_interface import InferenceInterface  # noqa: E402
+from server.proxy_interface import MatriceProxyInterface  # noqa: E402
+
+__all__ = [
+    "MatriceDeploy",
+    "MatriceDeployServer",
+    "InferenceInterface",
+    "MatriceProxyInterface",
+]
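
The new package __init__.py above runs its dependency bootstrap at import time, gated on the MATRICE_ENABLE_OCR_BOOTSTRAP environment variable and the machine architecture. A minimal sketch of opting out of that bootstrap, assuming (as in the code above) the variable is read only when the package is first imported:

    import os

    # Hypothetical opt-out: a non-zero value makes the
    # "if not int(_ENABLE_OCR_BOOTSTRAP) and not _IS_JETSON:" guard skip the pip installs.
    os.environ["MATRICE_ENABLE_OCR_BOOTSTRAP"] = "1"

    import matrice_inference  # noqa: E402  # base/OCR dependencies are not auto-installed now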
{matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/server/__init__.py
@@ -1,23 +1,29 @@
 import os
 import logging
 
-# Root logger
-logging.basicConfig(level=logging.DEBUG)
+# Define paths
+log_path = os.path.join(os.getcwd(), "deploy_server.log")
 
-# Console handler (INFO+)
+# Create handlers explicitly
 console_handler = logging.StreamHandler()
-console_handler.setLevel(logging.INFO)
-
-# File handler (DEBUG+)
-log_path = os.path.join(os.getcwd(), "deploy_server.log")
 file_handler = logging.FileHandler(log_path)
+
+# Set levels
+console_handler.setLevel(logging.INFO)
 file_handler.setLevel(logging.DEBUG)
 
-# Formatter
+# Define a formatter
 formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
 console_handler.setFormatter(formatter)
 file_handler.setFormatter(formatter)
 
-# Add handlers to root logger
-logging.getLogger().addHandler(console_handler)
-logging.getLogger().addHandler(file_handler)
+# Get the root logger
+logger = logging.getLogger()
+logger.setLevel(logging.DEBUG)  # Root level must be the lowest (DEBUG)
+
+# Optional: remove any default handlers if basicConfig was called earlier
+if logger.hasHandlers():
+    logger.handlers.clear()
+
+logger.addHandler(console_handler)
+logger.addHandler(file_handler)
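
With this setup the root logger sits at DEBUG while the handlers filter: the console shows INFO and above, and deploy_server.log captures everything. A small sketch of how downstream module loggers inherit that configuration (assuming matrice_inference.server has already been imported so the handlers are attached):

    import logging

    log = logging.getLogger("my_worker")  # hypothetical module logger
    log.debug("written only to deploy_server.log (file handler is DEBUG+)")
    log.info("written to both the console and deploy_server.log")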
{matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/server/model/triton_server.py
@@ -17,8 +17,6 @@ from matrice_common.utils import dependencies_check
 TRITON_DOCKER_IMAGE = "nvcr.io/nvidia/tritonserver:23.08-py3"
 BASE_PATH = "./model_repository"
 
-logging.basicConfig(level=logging.INFO, format="%(asctime)s [%(levelname)s] %(message)s")
-
 class TritonServer:
     def __init__(
         self,
@@ -1161,7 +1159,7 @@ class TritonInference:
         model_alphabet: str = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_",
         return_confidence: bool = True,
         confidence_threshold: float = 0.0,  # Disabled threshold to match ONNX
-    ) -> Tuple[list[str], np.ndarray] | list[str]:
+    ) -> Union[Tuple[List[str], np.ndarray], List[str]]:
         """Postprocess OCR model outputs into license plate strings.
 
         Args:
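
The annotation change here swaps the PEP 604/585 style (`Tuple[list[str], np.ndarray] | list[str]`) for typing.Union, which also works on older interpreters: the `|` union operator in evaluated annotations requires Python 3.10, and `list[str]` requires 3.9. A minimal, hypothetical stand-in showing only the equivalent signature shape, not the library's actual method:

    from typing import List, Tuple, Union

    import numpy as np

    def postprocess_ocr_stub(return_confidence: bool = True) -> Union[Tuple[List[str], np.ndarray], List[str]]:
        # Hypothetical stand-in for the real postprocess method; illustrates the annotation only.
        plates = ["ABC1234"]
        confidences = np.array([0.97])
        return (plates, confidences) if return_confidence else plates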
{matrice_inference-0.1.2 → matrice_inference-0.1.23}/src/matrice_inference/server/server.py
@@ -131,6 +131,7 @@ class MatriceDeployServer:
         self.streaming_pipeline = None
         self.app_deployment = None
         self.stream_manager = None
+        self.camera_config_monitor = None
 
         # Initialize utilities
         self.utils = None
@@ -320,7 +321,10 @@
             post_processing_config = {}
         if isinstance(post_processing_config, dict):
             post_processing_config["facial_recognition_server_id"] = self.job_params.get("facial_recognition_server_id", None)
+            post_processing_config["lpr_server_id"] = self.job_params.get("lpr_server_id", None)
             post_processing_config["session"] = self.session  # Pass the session to post-processing
+            # Pass deployment_id for facial recognition deployment update
+            post_processing_config["deployment_id"] = self.deployment_id
 
         # Get index_to_category from action_tracker if available
         index_to_category = None
@@ -374,10 +378,6 @@
             self.streaming_pipeline = StreamingPipeline(
                 inference_interface=self.inference_interface,
                 post_processor=self.post_processor,
-                consumer_threads=self.job_params.get("consumer_threads", 4),
-                producer_threads=self.job_params.get("producer_threads", 2),
-                inference_threads=self.job_params.get("inference_threads", 4),
-                postprocessing_threads=self.job_params.get("postprocessing_threads", 2),
                 inference_queue_maxsize=self.job_params.get("inference_queue_maxsize", 5000),
                 postproc_queue_maxsize=self.job_params.get("postproc_queue_maxsize", 5000),
                 output_queue_maxsize=self.job_params.get("output_queue_maxsize", 5000),
@@ -402,6 +402,9 @@
 
             logging.info("Streaming pipeline initialized successfully")
 
+            # Start camera config monitor if app deployment is available
+            self._start_camera_config_monitor()
+
         except Exception as e:
             logging.error(f"Failed to initialize streaming pipeline: {str(e)}")
             raise
@@ -416,6 +419,37 @@
         finally:
             loop.close()
 
+    def _start_camera_config_monitor(self):
+        """Start the camera config monitor if app deployment is available."""
+        try:
+            if not self.app_deployment:
+                logging.info("No app deployment configured, skipping camera config monitor")
+                return
+
+            if not self.streaming_pipeline:
+                logging.warning("Streaming pipeline not initialized, skipping camera config monitor")
+                return
+
+            # Get check interval from job params (default: 60 seconds)
+            check_interval = self.job_params.get("camera_config_check_interval", 60)
+
+            # Import and create the monitor
+            from matrice_inference.server.stream.camera_config_monitor import CameraConfigMonitor
+
+            self.camera_config_monitor = CameraConfigMonitor(
+                app_deployment=self.app_deployment,
+                streaming_pipeline=self.streaming_pipeline,
+                check_interval=check_interval
+            )
+
+            # Start monitoring
+            self.camera_config_monitor.start()
+            logging.info(f"Camera config monitor started (check interval: {check_interval}s)")
+
+        except Exception as e:
+            logging.error(f"Failed to start camera config monitor: {str(e)}")
+            # Don't raise - monitor is optional
+
     def _stop_pipeline_in_new_loop(self):
         """Stop the pipeline in a new event loop (for use when already in an event loop)."""
         import asyncio
@@ -463,6 +497,14 @@
         # Signal shutdown to all components
         self._shutdown_event.set()
 
+        # Stop camera config monitor
+        if self.camera_config_monitor:
+            try:
+                self.camera_config_monitor.stop()
+                logging.info("Camera config monitor stopped")
+            except Exception as exc:
+                logging.error("Error stopping camera config monitor: %s", str(exc))
+
         # Stop streaming pipeline
         if self.streaming_pipeline:
             try:
matrice_inference-0.1.23/src/matrice_inference/server/stream/camera_config_monitor.py
@@ -0,0 +1,221 @@
+"""Background monitor for camera configuration updates."""
+
+import hashlib
+import json
+import logging
+import threading
+import time
+from typing import Dict, Optional
+
+from matrice_inference.server.stream.utils import CameraConfig
+
+
+class CameraConfigMonitor:
+    """Monitors and syncs camera configurations from app deployment API."""
+
+    DEFAULT_CHECK_INTERVAL = 120  # seconds
+
+    def __init__(
+        self,
+        app_deployment,
+        streaming_pipeline,
+        check_interval: int = DEFAULT_CHECK_INTERVAL
+    ):
+        """Initialize the camera config monitor.
+
+        Args:
+            app_deployment: AppDeployment instance to fetch configs
+            streaming_pipeline: StreamingPipeline instance to update
+            check_interval: Seconds between config checks
+        """
+        self.app_deployment = app_deployment
+        self.streaming_pipeline = streaming_pipeline
+        self.check_interval = max(10, int(check_interval))  # Minimum 10 seconds
+        self.running = False
+        self.thread: Optional[threading.Thread] = None
+        self.logger = logging.getLogger(__name__)
+
+        # Track camera configs by hash to detect changes (thread-safe access)
+        self.camera_hashes: Dict[str, str] = {}
+        self._hashes_lock = threading.Lock()
+
+    def start(self) -> None:
+        """Start the background monitoring thread."""
+        if self.running:
+            self.logger.warning("Camera config monitor already running")
+            return
+
+        self.running = True
+        self.thread = threading.Thread(
+            target=self._monitor_loop,
+            name="CameraConfigMonitor",
+            daemon=False
+        )
+        self.thread.start()
+        self.logger.info(f"Started camera config monitor (check interval: {self.check_interval}s)")
+
+    def stop(self) -> None:
+        """Stop the background monitoring thread."""
+        if not self.running:
+            return
+
+        self.running = False
+        if self.thread:
+            self.thread.join(timeout=5.0)
+        self.logger.info("Stopped camera config monitor")
+
+    def _monitor_loop(self) -> None:
+        """Main monitoring loop - periodically sync camera configs."""
+        while self.running:
+            try:
+                self._sync_camera_configs()
+            except Exception as e:
+                self.logger.error(f"Error syncing camera configs: {e}")
+
+            # Sleep in small intervals to allow quick shutdown
+            for _ in range(self.check_interval):
+                if not self.running:
+                    break
+                time.sleep(1)
+
+    def _sync_camera_configs(self) -> None:
+        """Fetch latest configs from API and sync with pipeline."""
+        try:
+            # Fetch current configs from app deployment API
+            latest_configs = self.app_deployment.get_camera_configs()
+
+            if not latest_configs:
+                self.logger.debug("No camera configs returned from API")
+                return
+
+            # Process each camera config
+            for camera_id, camera_config in latest_configs.items():
+                self._process_camera_config(camera_id, camera_config)
+
+            # Optional: Remove cameras that are no longer in API
+            # Uncomment if you want to auto-remove deleted cameras
+            # self._remove_deleted_cameras(latest_configs)
+
+        except Exception as e:
+            self.logger.error(f"Failed to sync camera configs: {e}")
+
+    def _process_camera_config(self, camera_id: str, camera_config: CameraConfig) -> None:
+        """Process a single camera config - add new or update changed."""
+        try:
+            # Calculate config hash to detect changes
+            config_hash = self._hash_camera_config(camera_config)
+
+            # Thread-safe read of previous hash
+            with self._hashes_lock:
+                previous_hash = self.camera_hashes.get(camera_id)
+
+            # Check if this is a new camera or config changed
+            if previous_hash is None:
+                # New camera - add it
+                self._add_new_camera(camera_id, camera_config, config_hash)
+            elif previous_hash != config_hash:
+                # Config changed - update it
+                self._update_changed_camera(camera_id, camera_config, config_hash)
+            else:
+                # No change - skip
+                self.logger.debug(f"Camera {camera_id} config unchanged")
+
+        except Exception as e:
+            self.logger.error(f"Error processing camera {camera_id}: {e}")
+
+    def _add_new_camera(self, camera_id: str, camera_config: CameraConfig, config_hash: str) -> None:
+        """Add a new camera to the pipeline."""
+        try:
+            # Use asyncio to add camera config
+            import asyncio
+            loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(loop)
+            try:
+                success = loop.run_until_complete(
+                    self.streaming_pipeline.add_camera_config(camera_config)
+                )
+                if success:
+                    # Thread-safe write
+                    with self._hashes_lock:
+                        self.camera_hashes[camera_id] = config_hash
+                    self.logger.info(f"Added new camera: {camera_id}")
+                else:
+                    self.logger.warning(f"Failed to add camera: {camera_id}")
+            finally:
+                loop.close()
+
+        except Exception as e:
+            self.logger.error(f"Error adding camera {camera_id}: {e}")
+
+    def _update_changed_camera(self, camera_id: str, camera_config: CameraConfig, config_hash: str) -> None:
+        """Update an existing camera with changed config."""
+        try:
+            # Use asyncio to update camera config
+            import asyncio
+            loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(loop)
+            try:
+                success = loop.run_until_complete(
+                    self.streaming_pipeline.update_camera_config(camera_config)
+                )
+                if success:
+                    # Thread-safe write
+                    with self._hashes_lock:
+                        self.camera_hashes[camera_id] = config_hash
+                    self.logger.info(f"Updated camera config: {camera_id}")
+                else:
+                    self.logger.warning(f"Failed to update camera: {camera_id}")
+            finally:
+                loop.close()
+
+        except Exception as e:
+            self.logger.error(f"Error updating camera {camera_id}: {e}")
+
+    def _remove_deleted_cameras(self, latest_configs: Dict[str, CameraConfig]) -> None:
+        """Remove cameras that are no longer in the API response."""
+        # Thread-safe read
+        with self._hashes_lock:
+            current_camera_ids = set(self.camera_hashes.keys())
+
+        latest_camera_ids = set(latest_configs.keys())
+        deleted_camera_ids = current_camera_ids - latest_camera_ids
+
+        for camera_id in deleted_camera_ids:
+            try:
+                import asyncio
+                loop = asyncio.new_event_loop()
+                asyncio.set_event_loop(loop)
+                try:
+                    success = loop.run_until_complete(
+                        self.streaming_pipeline.remove_camera_config(camera_id)
+                    )
+                    if success:
+                        # Thread-safe delete
+                        with self._hashes_lock:
+                            del self.camera_hashes[camera_id]
+                        self.logger.info(f"Removed deleted camera: {camera_id}")
+                finally:
+                    loop.close()
+            except Exception as e:
+                self.logger.error(f"Error removing camera {camera_id}: {e}")
+
+    def _hash_camera_config(self, camera_config: CameraConfig) -> str:
+        """Generate a hash of the camera config to detect changes."""
+        try:
+            # Create a dict with relevant config fields
+            config_dict = {
+                "camera_id": camera_config.camera_id,
+                "input_topic": camera_config.input_topic,
+                "output_topic": camera_config.output_topic,
+                "stream_config": camera_config.stream_config,
+                "enabled": camera_config.enabled
+            }
+
+            # Convert to JSON string (sorted for consistency) and hash
+            config_str = json.dumps(config_dict, sort_keys=True)
+            return hashlib.md5(config_str.encode()).hexdigest()
+
+        except Exception as e:
+            self.logger.error(f"Error hashing camera config: {e}")
+            return ""
+
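
The new monitor detects changes by hashing the fields it cares about and comparing digests between polls (see _hash_camera_config and _process_camera_config above). A small, self-contained sketch of that pattern using a hypothetical plain-dict config instead of the package's CameraConfig type:

    import hashlib
    import json

    def config_digest(config: dict) -> str:
        # Sorted JSON keeps the digest stable regardless of key order.
        return hashlib.md5(json.dumps(config, sort_keys=True).encode()).hexdigest()

    seen = {}  # camera_id -> last digest, analogous to CameraConfigMonitor.camera_hashes
    cfg = {"camera_id": "cam-1", "input_topic": "frames.cam-1", "enabled": True}

    digest = config_digest(cfg)
    if seen.get("cam-1") is None:
        seen["cam-1"] = digest   # new camera -> add it to the pipeline
    elif seen["cam-1"] != digest:
        seen["cam-1"] = digest   # changed config -> update the camera
    # else: unchanged -> skip

MD5 is adequate here because the digest only serves as a change detector between polls, not as a security boundary.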