matrice-inference 0.1.2-py3-none-any.whl → 0.1.23-py3-none-any.whl

This diff shows the changes between publicly available versions of the package as released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.

Potentially problematic release.


This version of matrice-inference might be problematic.

@@ -1,30 +1,42 @@
 import asyncio
 import json
-import time
 import logging
-import threading
 import queue
-from typing import Any, Dict
+import threading
+import time
+from typing import Any, Dict, Optional
+
 from matrice_common.stream.matrice_stream import MatriceStream
 from matrice_inference.server.stream.utils import CameraConfig
 
 class ProducerWorker:
-    """Handles message production to streams."""
-
-    def __init__(self, worker_id: int, output_queue: queue.PriorityQueue,
-                 camera_configs: Dict[str, CameraConfig], message_timeout: float):
+    """Handles message production to streams with clean resource management."""
+
+    DEFAULT_DB = 0
+
+    def __init__(
+        self,
+        worker_id: int,
+        output_queue: queue.PriorityQueue,
+        camera_configs: Dict[str, CameraConfig],
+        message_timeout: float
+    ):
         self.worker_id = worker_id
         self.output_queue = output_queue
         self.camera_configs = camera_configs
         self.message_timeout = message_timeout
         self.running = False
-        self.producer_streams = {} # Will be created in worker thread's event loop
+        self.producer_streams: Dict[str, MatriceStream] = {}
         self.logger = logging.getLogger(f"{__name__}.producer.{worker_id}")
 
-    def start(self):
+    def start(self) -> threading.Thread:
         """Start the producer worker in a separate thread."""
         self.running = True
-        thread = threading.Thread(target=self._run, name=f"ProducerWorker-{self.worker_id}", daemon=False)
+        thread = threading.Thread(
+            target=self._run,
+            name=f"ProducerWorker-{self.worker_id}",
+            daemon=False
+        )
         thread.start()
         return thread
 
@@ -32,116 +44,161 @@ class ProducerWorker:
         """Stop the producer worker."""
         self.running = False
 
-    def _run(self):
-        """Main producer loop."""
+    def _run(self) -> None:
+        """Main producer loop with proper resource management."""
         loop = asyncio.new_event_loop()
         asyncio.set_event_loop(loop)
-
+
         self.logger.info(f"Started producer worker {self.worker_id}")
-
+
         try:
-            # Initialize streams for all cameras in this event loop
             loop.run_until_complete(self._initialize_streams())
-
-            while self.running:
-                try:
-                    # Get task from output queue
-                    try:
-                        priority, timestamp, task_data = self.output_queue.get(timeout=self.message_timeout)
-                    except queue.Empty:
-                        continue
-
-                    # Send message to stream
-                    loop.run_until_complete(self._send_message_safely(task_data))
-
-                except Exception as e:
-                    self.logger.error(f"Producer error: {e}")
-                    time.sleep(0.1)
-
+            self._process_messages(loop)
+        except Exception as e:
+            self.logger.error(f"Fatal error in producer worker: {e}")
         finally:
-            # Clean up streams
-            for stream in self.producer_streams.values():
-                try:
-                    loop.run_until_complete(stream.async_close())
-                except Exception as e:
-                    self.logger.error(f"Error closing producer stream: {e}")
+            self._cleanup_resources(loop)
+
+    def _process_messages(self, loop: asyncio.AbstractEventLoop) -> None:
+        """Main message processing loop."""
+        while self.running:
+            try:
+                task = self._get_task_from_queue()
+                if task:
+                    loop.run_until_complete(self._send_message_safely(task))
+            except Exception as e:
+                self.logger.error(f"Producer error: {e}")
+                time.sleep(0.1)
+
+    def _get_task_from_queue(self) -> Optional[Dict[str, Any]]:
+        """Get task from output queue with timeout handling."""
+        try:
+            priority, timestamp, task_data = self.output_queue.get(timeout=self.message_timeout)
+            return task_data
+        except queue.Empty:
+            return None
+        except Exception as e:
+            self.logger.error(f"Error getting task from queue: {e}")
+            return None
+
+    def _cleanup_resources(self, loop: asyncio.AbstractEventLoop) -> None:
+        """Clean up streams and event loop resources."""
+        for stream in self.producer_streams.values():
+            try:
+                loop.run_until_complete(stream.async_close())
+            except Exception as e:
+                self.logger.error(f"Error closing producer stream: {e}")
+
+        try:
             loop.close()
-        self.logger.info(f"Producer worker {self.worker_id} stopped")
+        except Exception as e:
+            self.logger.error(f"Error closing event loop: {e}")
+
+        self.logger.info(f"Producer worker {self.worker_id} stopped")
 
-    async def _initialize_streams(self):
-        """Initialize producer streams for all cameras in the current event loop."""
+    async def _initialize_streams(self) -> None:
+        """Initialize producer streams for all cameras with proper error handling."""
         try:
             from matrice_common.stream.matrice_stream import MatriceStream, StreamType
-
+
             for camera_id, camera_config in self.camera_configs.items():
                 try:
-                    stream_config = camera_config.stream_config
-                    output_topic = camera_config.output_topic
-
-                    # Determine stream type
-                    stream_type = StreamType.KAFKA if stream_config.get("stream_type", "kafka").lower() == "kafka" else StreamType.REDIS
-
-                    # Create stream configuration
-                    if stream_type == StreamType.KAFKA:
-                        stream_params = {
-                            "bootstrap_servers": stream_config.get("bootstrap_servers", "localhost:9092"),
-                            "sasl_username": stream_config.get("sasl_username", "matrice-sdk-user"),
-                            "sasl_password": stream_config.get("sasl_password", "matrice-sdk-password"),
-                            "sasl_mechanism": stream_config.get("sasl_mechanism", "SCRAM-SHA-256"),
-                            "security_protocol": stream_config.get("security_protocol", "SASL_PLAINTEXT"),
-                        }
-                    else: # Redis
-                        stream_params = {
-                            "host": stream_config.get("host", "localhost"),
-                            "port": stream_config.get("port", 6379),
-                            "password": stream_config.get("password"),
-                            "username": stream_config.get("username"),
-                            "db": stream_config.get("db", 0),
-                        }
-
-                    # Create and setup producer stream
-                    producer_stream = MatriceStream(stream_type, **stream_params)
-                    await producer_stream.async_setup(output_topic)
-                    self.producer_streams[camera_id] = producer_stream
-
-                    self.logger.info(f"Initialized {stream_type.value} producer stream for camera {camera_id} in worker {self.worker_id}")
-
+                    await self._initialize_camera_stream(camera_id, camera_config, StreamType)
                 except Exception as e:
                     self.logger.error(f"Failed to initialize producer stream for camera {camera_id}: {e}")
                     continue
-
+
         except Exception as e:
            self.logger.error(f"Failed to initialize producer streams: {e}")
            raise
+
+    async def _initialize_camera_stream(
+        self, camera_id: str, camera_config: CameraConfig, StreamType: Any
+    ) -> None:
+        """Initialize producer stream for a single camera."""
+        from matrice_common.stream.matrice_stream import MatriceStream
+
+        stream_type = self._get_stream_type(camera_config.stream_config, StreamType)
+        stream_params = self._build_stream_params(camera_config.stream_config, stream_type, StreamType)
+
+        producer_stream = MatriceStream(stream_type, **stream_params)
+        await producer_stream.async_setup(camera_config.output_topic)
+        self.producer_streams[camera_id] = producer_stream
+
+        self.logger.info(
+            f"Initialized {stream_type.value} producer stream for camera {camera_id} in worker {self.worker_id}"
+        )
+
+    def _get_stream_type(self, stream_config: Dict[str, Any], StreamType: Any) -> Any:
+        """Determine stream type from configuration."""
+        stream_type_str = stream_config.get("stream_type", "kafka").lower()
+        return StreamType.KAFKA if stream_type_str == "kafka" else StreamType.REDIS
+
+    def _build_stream_params(self, stream_config: Dict[str, Any], stream_type: Any, StreamType: Any) -> Dict[str, Any]:
+        """Build stream parameters based on type."""
+        if stream_type == StreamType.KAFKA:
+            return {
+                "bootstrap_servers": stream_config.get("bootstrap_servers", "localhost:9092"),
+                "sasl_username": stream_config.get("sasl_username", "matrice-sdk-user"),
+                "sasl_password": stream_config.get("sasl_password", "matrice-sdk-password"),
+                "sasl_mechanism": stream_config.get("sasl_mechanism", "SCRAM-SHA-256"),
+                "security_protocol": stream_config.get("security_protocol", "SASL_PLAINTEXT"),
+            }
+        else:
+            return {
+                "host": stream_config.get("host", "localhost"),
+                "port": stream_config.get("port", 6379),
+                "password": stream_config.get("password"),
+                "username": stream_config.get("username"),
+                "db": stream_config.get("db", self.DEFAULT_DB),
+            }
 
-    async def _send_message_safely(self, task_data: Dict[str, Any]):
-        """Send message to the appropriate stream safely."""
+    async def _send_message_safely(self, task_data: Dict[str, Any]) -> None:
+        """Send message to the appropriate stream with validation and error handling."""
         try:
-            camera_id = task_data["camera_id"]
-            message_key = task_data["message_key"]
-            data = task_data["data"]
-
-            # Check if camera and stream still exist
-            if camera_id not in self.producer_streams or camera_id not in self.camera_configs:
-                self.logger.warning(f"Camera {camera_id} not found in producer streams or configs")
+            if not self._validate_task_data(task_data):
                 return
-
-            camera_config = self.camera_configs[camera_id]
-            if not camera_config.enabled:
-                self.logger.debug(f"Camera {camera_id} is disabled, skipping message")
+
+            camera_id = task_data["camera_id"]
+
+            if not self._validate_camera_availability(camera_id):
                 return
-
-            # Get producer stream for camera
-            producer_stream = self.producer_streams[camera_id]
-            output_topic = camera_config.output_topic
-
-            # Send message to stream
-            await producer_stream.async_add_message(
-                output_topic,
-                json.dumps(data),
-                key=message_key
-            )
-
+
+            await self._send_message_to_stream(task_data, camera_id)
+
         except Exception as e:
             self.logger.error(f"Error sending message: {e}")
 
+    def _validate_task_data(self, task_data: Dict[str, Any]) -> bool:
+        """Validate that task data contains required fields."""
+        required_fields = ["camera_id", "message_key", "data"]
+        for field in required_fields:
+            if field not in task_data:
+                self.logger.error(f"Missing required field '{field}' in task data")
+                return False
+        return True
+
+    def _validate_camera_availability(self, camera_id: str) -> bool:
+        """Validate that camera and its stream are available."""
+        if camera_id not in self.producer_streams or camera_id not in self.camera_configs:
+            self.logger.warning(f"Camera {camera_id} not found in producer streams or configs")
+            return False
+
+        camera_config = self.camera_configs[camera_id]
+        if not camera_config.enabled:
+            self.logger.debug(f"Camera {camera_id} is disabled, skipping message")
+            return False
+
+        return True
+
+    async def _send_message_to_stream(self, task_data: Dict[str, Any], camera_id: str) -> None:
+        """Send message to the stream for the specified camera."""
+        producer_stream = self.producer_streams[camera_id]
+        camera_config = self.camera_configs[camera_id]
+
+        await producer_stream.async_add_message(
+            camera_config.output_topic,
+            json.dumps(task_data["data"]),
+            key=task_data["message_key"]
+        )
+
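
For orientation, the sketch below shows how the refactored ProducerWorker is driven, based only on the interfaces visible in this diff: the constructor arguments, start()/stop(), the (priority, timestamp, task_data) tuples pulled from the priority queue, and the camera_id/message_key/data fields that _validate_task_data checks. The ProducerWorker import path, the CameraConfig keyword arguments, and the camera and topic names are illustrative assumptions, not values taken from the package.

import queue
import time

from matrice_inference.server.stream.utils import CameraConfig
# Hypothetical module path; the diff does not name the file that defines ProducerWorker.
from matrice_inference.server.stream.producer_worker import ProducerWorker

# A camera entry needs the attributes the worker reads: stream_config (dict),
# output_topic (str) and enabled (bool). The keyword arguments assume
# CameraConfig accepts them directly.
camera_configs = {
    "camera-1": CameraConfig(
        stream_config={"stream_type": "redis", "host": "localhost", "port": 6379},
        output_topic="camera-1-results",
        enabled=True,
    ),
}

output_queue: queue.PriorityQueue = queue.PriorityQueue()
worker = ProducerWorker(
    worker_id=0,
    output_queue=output_queue,
    camera_configs=camera_configs,
    message_timeout=1.0,
)
thread = worker.start()

# Each queue item is a (priority, timestamp, task_data) tuple; task_data must
# carry the camera_id, message_key and data fields the worker validates.
output_queue.put((
    1,            # lower number = higher priority in a PriorityQueue
    time.time(),  # timestamp acts as a tie-breaker between equal priorities
    {"camera_id": "camera-1", "message_key": "frame-0001", "data": {"detections": []}},
))

time.sleep(2.0)  # give the worker time to drain the queue
worker.stop()    # the loop exits after the next queue timeout
thread.join()

Because each worker builds its producer streams inside the event loop of its own thread, several workers can be started against the same output queue without sharing Kafka or Redis clients. A Kafka-backed camera would instead carry a stream_config with the bootstrap_servers and SASL keys read by _build_stream_params rather than the Redis host/port shown here.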