matrice-streaming 0.1.14__py3-none-any.whl → 0.1.65__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- matrice_streaming/__init__.py +44 -32
- matrice_streaming/streaming_gateway/camera_streamer/__init__.py +68 -1
- matrice_streaming/streaming_gateway/camera_streamer/async_camera_worker.py +1388 -0
- matrice_streaming/streaming_gateway/camera_streamer/async_ffmpeg_worker.py +966 -0
- matrice_streaming/streaming_gateway/camera_streamer/camera_streamer.py +188 -24
- matrice_streaming/streaming_gateway/camera_streamer/device_detection.py +507 -0
- matrice_streaming/streaming_gateway/camera_streamer/encoding_pool_manager.py +136 -0
- matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_camera_streamer.py +1048 -0
- matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_config.py +192 -0
- matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_worker_manager.py +470 -0
- matrice_streaming/streaming_gateway/camera_streamer/gstreamer_camera_streamer.py +1368 -0
- matrice_streaming/streaming_gateway/camera_streamer/gstreamer_worker.py +1063 -0
- matrice_streaming/streaming_gateway/camera_streamer/gstreamer_worker_manager.py +546 -0
- matrice_streaming/streaming_gateway/camera_streamer/message_builder.py +60 -15
- matrice_streaming/streaming_gateway/camera_streamer/nvdec.py +1330 -0
- matrice_streaming/streaming_gateway/camera_streamer/nvdec_worker_manager.py +412 -0
- matrice_streaming/streaming_gateway/camera_streamer/platform_pipelines.py +680 -0
- matrice_streaming/streaming_gateway/camera_streamer/stream_statistics.py +111 -4
- matrice_streaming/streaming_gateway/camera_streamer/video_capture_manager.py +223 -27
- matrice_streaming/streaming_gateway/camera_streamer/worker_manager.py +694 -0
- matrice_streaming/streaming_gateway/debug/__init__.py +27 -2
- matrice_streaming/streaming_gateway/debug/benchmark.py +727 -0
- matrice_streaming/streaming_gateway/debug/debug_gstreamer_gateway.py +599 -0
- matrice_streaming/streaming_gateway/debug/debug_streaming_gateway.py +245 -95
- matrice_streaming/streaming_gateway/debug/debug_utils.py +29 -0
- matrice_streaming/streaming_gateway/debug/test_videoplayback.py +318 -0
- matrice_streaming/streaming_gateway/dynamic_camera_manager.py +656 -39
- matrice_streaming/streaming_gateway/metrics_reporter.py +676 -139
- matrice_streaming/streaming_gateway/streaming_action.py +71 -20
- matrice_streaming/streaming_gateway/streaming_gateway.py +1026 -78
- matrice_streaming/streaming_gateway/streaming_gateway_utils.py +175 -20
- matrice_streaming/streaming_gateway/streaming_status_listener.py +89 -0
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/METADATA +1 -1
- matrice_streaming-0.1.65.dist-info/RECORD +56 -0
- matrice_streaming-0.1.14.dist-info/RECORD +0 -38
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/WHEEL +0 -0
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/licenses/LICENSE.txt +0 -0
- {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,1063 @@
|
|
|
1
|
+
"""GStreamer-based async camera worker process.
|
|
2
|
+
|
|
3
|
+
This module implements an async event loop worker that handles multiple cameras
|
|
4
|
+
using GStreamer pipelines for efficient hardware/software video encoding.
|
|
5
|
+
"""
|
|
6
|
+
import asyncio
|
|
7
|
+
import logging
|
|
8
|
+
import time
|
|
9
|
+
import multiprocessing
|
|
10
|
+
import os
|
|
11
|
+
import psutil
|
|
12
|
+
from typing import Dict, Any, Optional, List, Union
|
|
13
|
+
from collections import deque
|
|
14
|
+
|
|
15
|
+
from .message_builder import StreamMessageBuilder
|
|
16
|
+
from .stream_statistics import StreamStatistics
|
|
17
|
+
|
|
18
|
+
# Frame optimization
|
|
19
|
+
try:
|
|
20
|
+
from matrice_common.optimize import FrameOptimizer
|
|
21
|
+
FRAME_OPTIMIZER_AVAILABLE = True
|
|
22
|
+
except ImportError:
|
|
23
|
+
FRAME_OPTIMIZER_AVAILABLE = False
|
|
24
|
+
|
|
25
|
+
# GStreamer imports (optional)
|
|
26
|
+
GST_AVAILABLE = False
|
|
27
|
+
try:
|
|
28
|
+
import gi
|
|
29
|
+
gi.require_version('Gst', '1.0')
|
|
30
|
+
gi.require_version('GstApp', '1.0')
|
|
31
|
+
from gi.repository import Gst, GstApp, GLib
|
|
32
|
+
GST_AVAILABLE = True
|
|
33
|
+
except ImportError:
|
|
34
|
+
pass
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class GStreamerAsyncWorker:
|
|
38
|
+
"""Async worker process that handles multiple cameras using GStreamer.
|
|
39
|
+
|
|
40
|
+
This worker runs an async event loop to manage GStreamer pipelines
|
|
41
|
+
for multiple cameras with efficient encoding (NVENC/x264/JPEG).
|
|
42
|
+
"""
|
|
43
|
+
|
|
44
|
+
def __init__(
    self,
    worker_id: int,
    camera_configs: List[Dict[str, Any]],
    stream_config: Dict[str, Any],
    stop_event: multiprocessing.Event,
    health_queue: multiprocessing.Queue,
    command_queue: Optional[multiprocessing.Queue] = None,
    response_queue: Optional[multiprocessing.Queue] = None,
    gstreamer_encoder: str = "auto",
    gstreamer_codec: str = "h264",
    gstreamer_preset: str = "low-latency",
    gpu_id: int = 0,
    # Platform-specific parameters
    platform: str = "auto",
    use_hardware_decode: bool = True,
    use_hardware_jpeg: bool = True,
    jetson_use_nvmm: bool = True,
    frame_optimizer_mode: str = "hash-only",
    fallback_on_error: bool = True,
    verbose_pipeline_logging: bool = False,
):
    """Initialize GStreamer async worker.

    Args:
        worker_id: Unique identifier for this worker
        camera_configs: List of camera configurations
        stream_config: Streaming configuration (Redis, Kafka, etc.)
        stop_event: Event to signal worker shutdown
        health_queue: Queue for reporting health status
        command_queue: Queue for receiving dynamic commands
        response_queue: Queue for sending responses
        gstreamer_encoder: Encoder type (auto, nvenc, x264, openh264, jpeg)
        gstreamer_codec: Codec (h264, h265)
        gstreamer_preset: NVENC preset
        gpu_id: GPU device ID for NVENC
        platform: Platform override (auto, jetson, desktop-gpu, intel, amd, cpu)
        use_hardware_decode: Enable hardware decode
        use_hardware_jpeg: Enable hardware JPEG encoding
        jetson_use_nvmm: Use NVMM zero-copy on Jetson
        frame_optimizer_mode: Frame optimization mode
        fallback_on_error: Fallback to CPU on errors
        verbose_pipeline_logging: Enable verbose logging

    Raises:
        RuntimeError: If the GStreamer Python bindings (gi) could not be
            imported at module load time (GST_AVAILABLE is False).
    """
    # Fail fast: everything below depends on the Gst bindings.
    if not GST_AVAILABLE:
        raise RuntimeError("GStreamer not available for GStreamerAsyncWorker")

    # NOTE(review): stop_event/health_queue/command_queue/response_queue are
    # multiprocessing primitives shared with the parent process
    # (multiprocessing.Event is a factory function, not a class, so the
    # annotations above are nominal only).
    self.worker_id = worker_id
    self.camera_configs = camera_configs
    self.stream_config = stream_config
    self.stop_event = stop_event
    self.health_queue = health_queue
    self.command_queue = command_queue
    self.response_queue = response_queue

    # GStreamer settings
    self.gstreamer_encoder = gstreamer_encoder
    self.gstreamer_codec = gstreamer_codec
    self.gstreamer_preset = gstreamer_preset
    self.gpu_id = gpu_id

    # Logging
    self.logger = logging.getLogger(f"GStreamerWorker-{worker_id}")
    self.logger.info(f"Initializing GStreamer worker {worker_id} with {len(camera_configs)} cameras")

    # Initialize GStreamer (process-wide; safe to call once per worker process)
    Gst.init(None)

    # Platform detection and pipeline builder (NEW).
    # Imported lazily here — presumably to avoid import cycles and to keep
    # the module importable when GStreamer is absent; verify against callers.
    from .device_detection import PlatformDetector
    from .platform_pipelines import PipelineFactory
    from .gstreamer_camera_streamer import GStreamerConfig

    # Build GStreamerConfig from worker settings
    self.gstreamer_config = GStreamerConfig(
        encoder=gstreamer_encoder,
        codec=gstreamer_codec,
        preset=gstreamer_preset,
        gpu_id=gpu_id,
        platform=platform,
        use_hardware_decode=use_hardware_decode,
        use_hardware_jpeg=use_hardware_jpeg,
        jetson_use_nvmm=jetson_use_nvmm,
        frame_optimizer_mode=frame_optimizer_mode,
        fallback_on_error=fallback_on_error,
        verbose_pipeline_logging=verbose_pipeline_logging,
    )

    # Detect the host platform once and pick the matching pipeline builder.
    self.platform_detector = PlatformDetector.get_instance()
    self.platform_info = self.platform_detector.detect()
    self.pipeline_builder = PipelineFactory.get_builder(self.gstreamer_config, self.platform_info)

    self.logger.info(
        f"Worker {worker_id}: Platform={self.platform_info.platform_type.value}, "
        f"Recommended encoder={self.platform_info.recommended_encoder}"
    )

    # Components
    self.message_builder = StreamMessageBuilder(
        service_id=stream_config.get('service_id', 'streaming_gateway'),
        strip_input_content=False
    )
    self.statistics = StreamStatistics()

    # Initialize frame optimizer for skipping similar frames (hash-based for GStreamer).
    # NOTE(review): these thresholds look hand-tuned — confirm before changing.
    if FRAME_OPTIMIZER_AVAILABLE:
        self.frame_optimizer = FrameOptimizer(
            enabled=True,
            scale=0.4,
            diff_threshold=15,
            similarity_threshold=0.05,
            bg_update_interval=10,
        )
    else:
        self.frame_optimizer = None
    self._last_sent_frame_ids: Dict[str, str] = {}  # stream_key -> last sent frame_id
    self._last_frame_hashes: Dict[str, str] = {}  # stream_key -> frame hash for similarity

    # Pipeline management
    self.camera_tasks: Dict[str, asyncio.Task] = {}
    self.pipelines: Dict[str, Any] = {}  # GStreamer pipelines

    # Redis client (created later in initialize())
    self.redis_client = None

    # Metrics (rolling windows of the last 100 samples)
    self._encoding_times = deque(maxlen=100)
    self._frame_times = deque(maxlen=100)
    self._frames_encoded = 0
    self._encoding_errors = 0
    self._last_metrics_log = time.time()
    self._metrics_log_interval = 30.0
    self._process_info = psutil.Process(os.getpid())

    # Detected encoder (lazily resolved by _detect_encoder on first pipeline build)
    self._detected_encoder: Optional[str] = None

    self.logger.info(
        f"GStreamer Worker {worker_id}: encoder={gstreamer_encoder}, "
        f"codec={gstreamer_codec}, gpu_id={gpu_id}"
    )
|
|
185
|
+
|
|
186
|
+
def _detect_encoder(self) -> str:
|
|
187
|
+
"""Detect the best available encoder."""
|
|
188
|
+
if self.gstreamer_encoder != "auto":
|
|
189
|
+
return self.gstreamer_encoder
|
|
190
|
+
|
|
191
|
+
encoders = [
|
|
192
|
+
("nvenc", "nvh264enc ! fakesink"),
|
|
193
|
+
("x264", "x264enc ! fakesink"),
|
|
194
|
+
("openh264", "openh264enc ! fakesink"),
|
|
195
|
+
("jpeg", "jpegenc ! fakesink"),
|
|
196
|
+
]
|
|
197
|
+
|
|
198
|
+
for name, test_str in encoders:
|
|
199
|
+
try:
|
|
200
|
+
test = Gst.parse_launch(test_str)
|
|
201
|
+
if test:
|
|
202
|
+
test.set_state(Gst.State.NULL)
|
|
203
|
+
self.logger.info(f"Detected encoder: {name}")
|
|
204
|
+
return name
|
|
205
|
+
except Exception:
|
|
206
|
+
continue
|
|
207
|
+
|
|
208
|
+
return "x264"
|
|
209
|
+
|
|
210
|
+
def _build_pipeline_string(
|
|
211
|
+
self,
|
|
212
|
+
source: Union[str, int],
|
|
213
|
+
width: int,
|
|
214
|
+
height: int,
|
|
215
|
+
fps: int,
|
|
216
|
+
quality: int = 85
|
|
217
|
+
) -> str:
|
|
218
|
+
"""Build platform-optimized GStreamer pipeline string.
|
|
219
|
+
|
|
220
|
+
Args:
|
|
221
|
+
source: Video source
|
|
222
|
+
width: Target width
|
|
223
|
+
height: Target height
|
|
224
|
+
fps: Target FPS
|
|
225
|
+
quality: JPEG quality (for jpeg encoder)
|
|
226
|
+
|
|
227
|
+
Returns:
|
|
228
|
+
Pipeline description string
|
|
229
|
+
"""
|
|
230
|
+
# Detect source type
|
|
231
|
+
source_str = str(source)
|
|
232
|
+
if isinstance(source, int):
|
|
233
|
+
source_type = "camera"
|
|
234
|
+
elif source_str.startswith("rtsp://"):
|
|
235
|
+
source_type = "rtsp"
|
|
236
|
+
elif source_str.startswith(("http://", "https://")):
|
|
237
|
+
source_type = "http"
|
|
238
|
+
elif source_str.endswith((".mp4", ".mov", ".m4v", ".mkv", ".webm", ".avi")):
|
|
239
|
+
source_type = "file"
|
|
240
|
+
else:
|
|
241
|
+
source_type = "camera"
|
|
242
|
+
|
|
243
|
+
# Detect encoder
|
|
244
|
+
encoder_type = self._detected_encoder or self._detect_encoder()
|
|
245
|
+
self._detected_encoder = encoder_type
|
|
246
|
+
|
|
247
|
+
# Build config dict for pipeline builder
|
|
248
|
+
builder_config = {
|
|
249
|
+
'source_type': source_type,
|
|
250
|
+
'source': source,
|
|
251
|
+
'width': width,
|
|
252
|
+
'height': height,
|
|
253
|
+
'fps': fps,
|
|
254
|
+
'encoder': encoder_type,
|
|
255
|
+
'quality': quality,
|
|
256
|
+
'bitrate': 4000000, # 4 Mbps
|
|
257
|
+
}
|
|
258
|
+
|
|
259
|
+
# Use pipeline builder to construct platform-optimized pipeline
|
|
260
|
+
if self.gstreamer_config.frame_optimizer_mode == "dual-appsink":
|
|
261
|
+
pipeline_str = self.pipeline_builder.build_dual_appsink_pipeline(builder_config)
|
|
262
|
+
else:
|
|
263
|
+
pipeline_str = self.pipeline_builder.build_complete_pipeline(builder_config)
|
|
264
|
+
|
|
265
|
+
if self.gstreamer_config.verbose_pipeline_logging:
|
|
266
|
+
self.logger.info(f"Worker {self.worker_id} Pipeline: {pipeline_str[:150]}...")
|
|
267
|
+
|
|
268
|
+
return pipeline_str
|
|
269
|
+
|
|
270
|
+
async def initialize(self):
    """Set up the async Redis client used to publish frame messages."""
    try:
        # Imported lazily so this worker module can be loaded even when
        # matrice_common is not importable at module-import time.
        from matrice_common.stream import MatriceStream, StreamType

        stream = MatriceStream(
            stream_type=StreamType.REDIS,
            **self.stream_config
        )
        self.stream = stream
        self.redis_client = stream.async_client
        await self.redis_client.setup_client()

        self.logger.info(f"Worker {self.worker_id}: Initialized async Redis client")

    except Exception as exc:
        # Log with traceback, then propagate so run() can report the failure.
        self.logger.error(f"Worker {self.worker_id}: Failed to initialize: {exc}", exc_info=True)
        raise
|
|
288
|
+
|
|
289
|
+
async def run(self):
    """Main worker loop: start cameras, supervise their tasks, then shut down."""
    try:
        await self.initialize()

        # Spin up one handler task per configured camera.
        for cfg in self.camera_configs:
            await self._add_camera_internal(cfg)

        self._report_health("running", len(self.camera_tasks))

        # Optional dynamic-command listener.
        cmd_task = None
        if self.command_queue:
            cmd_task = asyncio.create_task(
                self._command_handler(),
                name="command-handler"
            )

        # Supervise camera tasks until shutdown is requested, reaping
        # finished tasks and reporting health once per second.
        while not self.stop_event.is_set():
            for key, task in list(self.camera_tasks.items()):
                if not task.done():
                    continue
                try:
                    task.result()
                except Exception as exc:
                    self.logger.error(f"Camera {key} failed: {exc}")
                del self.camera_tasks[key]

            self._report_health("running", len(self.camera_tasks))
            await asyncio.sleep(1.0)

        # Shutdown: cancel the command listener first, then tear down cameras.
        if cmd_task and not cmd_task.done():
            cmd_task.cancel()
            try:
                await cmd_task
            except asyncio.CancelledError:
                pass

        await self._shutdown()

    except Exception as exc:
        self.logger.error(f"Worker {self.worker_id}: Fatal error: {exc}", exc_info=True)
        self._report_health("error", error=str(exc))
        raise
|
|
336
|
+
|
|
337
|
+
async def _camera_handler(self, camera_config: Dict[str, Any]):
    """Handle a single camera with a GStreamer pipeline.

    Runs an outer retry loop that (re)builds the pipeline, warms it up by
    waiting for a first sample, then an inner loop that pulls samples from
    the appsink, computes PTS-based encoding latency, and forwards each
    frame via _process_and_send_frame.

    Exit conditions: stop_event set; video file finished with
    simulate_video_file_stream=False; or task cancellation. Cameras/RTSP
    sources are retried indefinitely with a capped linear backoff; looping
    video files restart immediately.

    Args:
        camera_config: Camera configuration
    """
    stream_key = camera_config['stream_key']
    stream_group_key = camera_config.get('stream_group_key', 'default')
    source = camera_config['source']
    topic = camera_config['topic']
    fps = camera_config.get('fps', 30)
    quality = camera_config.get('quality', 85)
    width = camera_config.get('width', 640)
    height = camera_config.get('height', 480)
    camera_location = camera_config.get('camera_location', 'Unknown')
    simulate_video_file_stream = camera_config.get('simulate_video_file_stream', False)

    # Retry settings (seconds); backoff grows linearly per retry cycle,
    # capped at MAX_RETRY_COOLDOWN.
    MIN_RETRY_COOLDOWN = 5
    MAX_RETRY_COOLDOWN = 30
    retry_cycle = 0

    # Detect source type for proper handling of video file end-of-stream.
    # NOTE(review): this extension list differs slightly from the one used
    # in _build_pipeline_string (no .m4v here) and is case-sensitive.
    source_str = str(source)
    is_video_file = source_str.endswith(('.mp4', '.avi', '.mkv', '.mov', '.webm'))

    # Track if we've successfully warmed up once (for faster restarts)
    has_warmed_up_once = False

    # OUTER LOOP: Retry forever
    while not self.stop_event.is_set():
        pipeline = None
        appsink = None
        start_pts = None          # first buffer PTS, baseline for latency calc
        wall_start_time = 0.0     # wall clock at first buffer
        consecutive_failures = 0
        frame_counter = 0

        try:
            # Build and start pipeline
            pipeline_str = self._build_pipeline_string(source, width, height, fps, quality)
            if not has_warmed_up_once:
                self.logger.info(f"Worker {self.worker_id}: Starting pipeline for {stream_key}")

            pipeline = Gst.parse_launch(pipeline_str)
            # The builder names the output appsink element "sink".
            appsink = pipeline.get_by_name("sink")

            if not appsink:
                raise RuntimeError("Failed to get appsink")

            # Start pipeline
            ret = pipeline.set_state(Gst.State.PLAYING)
            if ret == Gst.StateChangeReturn.FAILURE:
                raise RuntimeError("Failed to start pipeline")

            # Block until the state change completes (or fails).
            pipeline.get_state(Gst.CLOCK_TIME_NONE)

            self.pipelines[stream_key] = pipeline
            retry_cycle = 0  # successful start resets the backoff

            if not has_warmed_up_once:
                self.logger.info(
                    f"Worker {self.worker_id}: Camera {stream_key} started - "
                    f"{width}x{height} @ {fps} FPS (encoder: {self._detected_encoder})"
                )

            # Wait for first frame (warmup)
            # First time: longer timeout with initial delay
            # Subsequent restarts: fast path with no delay
            warmup_start = time.time()
            warmup_success = False
            first_sample = None

            if has_warmed_up_once:
                # Fast path for video loop restarts - no delay, shorter timeout
                warmup_timeout = 2.0
                warmup_attempts = 0
                while time.time() - warmup_start < warmup_timeout:
                    if self.stop_event.is_set():
                        break
                    warmup_attempts += 1
                    try:
                        # try_pull_sample blocks, so run it off the event loop.
                        first_sample = await asyncio.get_event_loop().run_in_executor(
                            None,
                            lambda: appsink.try_pull_sample(Gst.SECOND // 5)  # 200ms timeout
                        )
                    except Exception:
                        first_sample = None
                    if first_sample:
                        warmup_success = True
                        break
                    await asyncio.sleep(0.02)  # Very short delay between attempts
            else:
                # First time warmup - longer timeout with initial delay
                warmup_timeout = 10.0
                await asyncio.sleep(0.1)  # Brief initial delay

                warmup_attempts = 0
                while time.time() - warmup_start < warmup_timeout:
                    if self.stop_event.is_set():
                        break

                    warmup_attempts += 1

                    # Check for errors on the bus first
                    bus = pipeline.get_bus()
                    if bus:
                        error_msg = bus.pop_filtered(Gst.MessageType.ERROR)
                        if error_msg:
                            err, debug = error_msg.parse_error()
                            raise RuntimeError(f"Pipeline error during warmup: {err.message}")

                    # Try to pull first sample with 500ms timeout
                    try:
                        first_sample = await asyncio.get_event_loop().run_in_executor(
                            None,
                            lambda: appsink.try_pull_sample(Gst.SECOND // 2)  # 500ms timeout
                        )
                    except Exception as pull_err:
                        self.logger.debug(f"Warmup pull attempt {warmup_attempts} failed: {pull_err}")
                        first_sample = None

                    if first_sample:
                        self.logger.info(
                            f"Worker {self.worker_id}: Pipeline {stream_key} warmed up "
                            f"in {time.time() - warmup_start:.2f}s (attempt {warmup_attempts})"
                        )
                        warmup_success = True
                        has_warmed_up_once = True
                        break

                    # Short delay between attempts
                    await asyncio.sleep(0.1)

            if not warmup_success and not self.stop_event.is_set():
                # Only warn on the first-ever warmup; restarts fail quietly
                # into the RuntimeError below.
                if not has_warmed_up_once:
                    self.logger.warning(
                        f"Worker {self.worker_id}: Pipeline {stream_key} warmup failed after "
                        f"{warmup_attempts} attempts ({time.time() - warmup_start:.2f}s)"
                    )
                raise RuntimeError(f"Pipeline failed to produce frames within {warmup_timeout}s")

            # Process the first frame pulled during warmup
            if first_sample and not self.stop_event.is_set():
                buffer = first_sample.get_buffer()
                size = buffer.get_size()

                success, map_info = buffer.map(Gst.MapFlags.READ)
                if success:
                    frame_data = bytes(map_info.data)
                    buffer.unmap(map_info)

                    # Initialize PTS tracking
                    start_pts = buffer.pts
                    wall_start_time = time.time()
                    frame_counter = 1

                    # Send first frame
                    await self._process_and_send_frame(
                        frame_data, stream_key, stream_group_key, topic,
                        fps, quality, width, height, camera_location,
                        size, 0.0, frame_counter, 0.0
                    )

            # INNER LOOP: Process frames
            frame_interval = 1.0 / fps

            while not self.stop_event.is_set():
                try:
                    loop_start = time.time()

                    # Check for EOS (End-of-Stream) from GStreamer bus
                    # This happens when video files reach the end
                    bus = pipeline.get_bus()
                    if bus:
                        msg = bus.pop_filtered(Gst.MessageType.EOS)
                        if msg:
                            if is_video_file:
                                if simulate_video_file_stream:
                                    self.logger.info(
                                        f"Worker {self.worker_id}: Video {stream_key} reached end, "
                                        f"restarting (simulate_video_file_stream=True)"
                                    )
                                    await asyncio.sleep(1.0)
                                    break  # Restart in outer loop
                                else:
                                    self.logger.info(
                                        f"Worker {self.worker_id}: Video {stream_key} playback complete "
                                        f"(simulate_video_file_stream=False)"
                                    )
                                    return  # Exit completely
                            else:
                                # Camera EOS is unexpected - treat as error
                                self.logger.warning(
                                    f"Worker {self.worker_id}: Unexpected EOS from camera {stream_key}, "
                                    f"will reconnect"
                                )
                                consecutive_failures += 1
                                if consecutive_failures >= 10:
                                    break

                    # Pull frame (with timeout)
                    read_start = time.time()
                    sample = await asyncio.get_event_loop().run_in_executor(
                        None,
                        lambda: appsink.try_pull_sample(Gst.SECOND // 10)  # 100ms timeout
                    )
                    read_time = time.time() - read_start

                    if not sample:
                        consecutive_failures += 1

                        # Note: EOS handling is done above via bus messages
                        # This section handles frame read failures (network issues, etc.)
                        if consecutive_failures >= 10:
                            if is_video_file:
                                self.logger.info(
                                    f"Worker {self.worker_id}: Video {stream_key} - "
                                    f"no frames available, restarting"
                                )
                            else:
                                self.logger.warning(
                                    f"Worker {self.worker_id}: Camera {stream_key} - "
                                    f"reconnecting after {consecutive_failures} failures"
                                )
                            break

                        await asyncio.sleep(0.01)
                        continue

                    consecutive_failures = 0
                    frame_counter += 1

                    now = time.time()
                    buffer = sample.get_buffer()
                    size = buffer.get_size()

                    # Get frame data
                    success, map_info = buffer.map(Gst.MapFlags.READ)
                    if not success:
                        continue
                    frame_data = bytes(map_info.data)
                    buffer.unmap(map_info)

                    # Calculate latency using PTS: wall-clock elapsed minus
                    # stream-time elapsed approximates the encode/queue delay.
                    pts = buffer.pts
                    latency_ms = 0.0

                    if start_pts is None:
                        start_pts = pts
                        wall_start_time = now

                    if pts != Gst.CLOCK_TIME_NONE and start_pts is not None:
                        stream_time = (pts - start_pts) / Gst.SECOND
                        wall_time = now - wall_start_time
                        encode_latency = wall_time - stream_time
                        latency_ms = max(0, encode_latency * 1000)

                    # Track metrics
                    self._frames_encoded += 1
                    self._encoding_times.append(latency_ms / 1000)

                    # Send frame
                    await self._process_and_send_frame(
                        frame_data, stream_key, stream_group_key, topic,
                        fps, quality, width, height, camera_location,
                        size, latency_ms, frame_counter, read_time
                    )

                    # Log metrics periodically
                    if time.time() - self._last_metrics_log > self._metrics_log_interval:
                        self._last_metrics_log = time.time()
                        await self._log_metrics()

                    # Maintain FPS: sleep away whatever is left of this frame slot
                    elapsed = time.time() - loop_start
                    sleep_time = max(0, frame_interval - elapsed)
                    if sleep_time > 0:
                        await asyncio.sleep(sleep_time)

                except asyncio.CancelledError:
                    return
                except Exception as exc:
                    self.logger.error(f"Frame error in {stream_key}: {exc}")
                    consecutive_failures += 1
                    if consecutive_failures >= 10:
                        break
                    await asyncio.sleep(0.1)

        except asyncio.CancelledError:
            return
        except Exception as exc:
            self.logger.error(f"Pipeline error for {stream_key}: {exc}", exc_info=True)
            self._encoding_errors += 1

        finally:
            # Always tear the pipeline down before retrying or exiting.
            if pipeline:
                pipeline.set_state(Gst.State.NULL)
            if stream_key in self.pipelines:
                del self.pipelines[stream_key]

        # Determine retry behavior based on source type and simulation flag
        if self.stop_event.is_set():
            break

        if is_video_file and simulate_video_file_stream:
            # Video file with simulation enabled - restart immediately (no backoff)
            self.logger.info(
                f"Worker {self.worker_id}: Restarting video {stream_key} immediately "
                f"for continuous simulation"
            )
            continue  # Restart immediately
        elif is_video_file and not simulate_video_file_stream:
            # Video file without simulation - playback complete, exit cleanly
            self.logger.info(
                f"Worker {self.worker_id}: Video {stream_key} playback complete "
                f"(simulation disabled)"
            )
            break  # Exit outer loop
        else:
            # Camera or RTSP stream - apply capped linear backoff for
            # reconnection (MIN + retry_cycle seconds, up to MAX).
            cooldown = min(MAX_RETRY_COOLDOWN, MIN_RETRY_COOLDOWN + retry_cycle)
            self.logger.info(f"Worker {self.worker_id}: Retrying camera {stream_key} in {cooldown}s")
            await asyncio.sleep(cooldown)
            retry_cycle += 1

    self.logger.info(f"Worker {self.worker_id}: Camera handler for {stream_key} exited")
|
|
664
|
+
|
|
665
|
+
async def _process_and_send_frame(
    self,
    frame_data: bytes,
    stream_key: str,
    stream_group_key: str,
    topic: str,
    fps: int,
    quality: int,
    width: int,
    height: int,
    camera_location: str,
    frame_size: int,
    latency_ms: float,
    frame_counter: int,
    read_time: float
):
    """Build and send one encoded frame message to Redis.

    If the frame is byte-identical to the previously hashed frame for this
    stream (and a prior frame_id exists to reference), an empty payload with
    codec "cached" is published instead of re-sending the data, and the skip
    is recorded in statistics.

    Args:
        frame_data: Encoded frame payload bytes.
        stream_key: Unique key identifying the camera stream.
        stream_group_key: Key of the stream group this camera belongs to.
        topic: Redis topic to publish the message on.
        fps: Configured frames-per-second (carried in metadata).
        quality: Encoder quality setting (carried in metadata).
        width: Frame width in pixels (carried in metadata).
        height: Frame height in pixels (carried in metadata).
        camera_location: Human-readable camera location label.
        frame_size: Encoded frame size in bytes, used for statistics.
        latency_ms: PTS-based encoding latency in milliseconds.
        frame_counter: Monotonic frame number for this stream.
        read_time: Seconds spent obtaining this frame, used for statistics.

    NOTE: GStreamer frame optimization uses hash-based similarity for identical frames.
    See gstreamer_camera_streamer.py for detailed explanation of limitations.
    """
    last_read, last_write, last_process = self.statistics.get_timing(stream_key)
    input_order = self.statistics.get_next_input_order(stream_key)

    # Check frame similarity using hash-based detection (identical frames only).
    # A cached reference can only be emitted once a prior frame_id exists.
    is_similar = False
    reference_frame_id = self._last_sent_frame_ids.get(stream_key)

    if self.frame_optimizer and reference_frame_id:
        import hashlib
        # MD5 here is a fast content fingerprint, not a security measure.
        frame_hash = hashlib.md5(frame_data).hexdigest()
        last_hash = self._last_frame_hashes.get(stream_key)

        if last_hash == frame_hash:
            is_similar = True
        else:
            self._last_frame_hashes[stream_key] = frame_hash

    metadata = {
        "source": stream_key,
        "fps": fps,
        "quality": quality,
        "width": width,
        "height": height,
        "camera_location": camera_location,
        "feed_type": "camera",
        "frame_count": 1,
        "stream_unit": "frame",
        "encoder": f"gstreamer-{self._detected_encoder}",
        "codec": self.gstreamer_codec,
        "encoding_latency_ms": latency_ms,
        "frame_number": frame_counter,
    }

    # If frame is identical, send cached reference (empty payload)
    if is_similar and reference_frame_id:
        metadata["similarity_score"] = 1.0
        codec = "cached"
        frame_data_to_send = b""
    else:
        # JPEG encoder keeps its own codec label; otherwise use the video codec.
        codec = "jpeg" if self._detected_encoder == "jpeg" else self.gstreamer_codec
        frame_data_to_send = frame_data

    message = self.message_builder.build_message(
        frame_data=frame_data_to_send,
        stream_key=stream_key,
        stream_group_key=stream_group_key,
        codec=codec,
        metadata=metadata,
        topic=topic,
        broker_config=self.stream_config.get('bootstrap_servers', 'localhost'),
        input_order=input_order,
        last_read_time=last_read,
        last_write_time=last_write,
        last_process_time=last_process,
        cached_frame_id=reference_frame_id if is_similar else None,
    )

    write_start = time.time()
    await self.redis_client.add_message(topic, message)
    write_time = time.time() - write_start

    # Track frame_id for future cached references
    if not is_similar:
        new_frame_id = message.get("frame_id")
        if new_frame_id:
            self._last_sent_frame_ids[stream_key] = new_frame_id

    # Update statistics: skipped vs sent paths record different payload info.
    if is_similar:
        self.statistics.increment_frames_skipped()
        process_time = read_time + write_time
        self.statistics.update_timing(
            stream_key, read_time, write_time, process_time,
            0, 0  # No frame size or encoding time for cached
        )
    else:
        self.statistics.increment_frames_sent()
        process_time = read_time + write_time
        # Note: latency_ms is PTS-based, not pure encoding time
        self.statistics.update_timing(
            stream_key, read_time, write_time, process_time,
            frame_size, latency_ms / 1000
        )

    total_frame_time = write_time + (latency_ms / 1000)
    self._frame_times.append(total_frame_time)
|
|
772
|
+
|
|
773
|
+
async def _log_metrics(self):
    """Emit per-camera statistics plus worker-level encoder and resource usage."""
    try:
        # Detailed per-camera statistics first.
        for cam_key in self.camera_tasks:
            self.statistics.log_detailed_stats(cam_key)

        # Worker-level encoder summary (only when samples exist).
        samples = self._encoding_times
        if samples:
            avg_ms = sum(samples) / len(samples) * 1000
            summary = (
                f"Worker {self.worker_id} GStreamer: "
                f"encoder={self._detected_encoder}, "
                f"frames={self._frames_encoded}, "
                f"errors={self._encoding_errors}, "
                f"avg_latency={avg_ms:.2f}ms"
            )
            self.logger.info(summary)

        # Process-level resource usage.
        cpu = self._process_info.cpu_percent(interval=0.1)
        mem = self._process_info.memory_info().rss / 1024 / 1024
        self.logger.info(
            f"Worker {self.worker_id} Resources: CPU={cpu:.1f}%, Memory={mem:.1f}MB"
        )

    except Exception as exc:
        self.logger.warning(f"Failed to log metrics: {exc}")
|
|
800
|
+
|
|
801
|
+
async def _command_handler(self):
    """Poll the manager command queue and dispatch commands until shutdown."""
    while not self.stop_event.is_set():
        try:
            # The queue API is blocking-style, so poll it on the executor.
            pending = await asyncio.get_event_loop().run_in_executor(
                None, self._get_command_nonblocking
            )
            if not pending:
                # Nothing queued; back off briefly before polling again.
                await asyncio.sleep(0.1)
            else:
                await self._process_command(pending)
        except asyncio.CancelledError:
            break
        except Exception as exc:
            self.logger.error(f"Command handler error: {exc}")
            await asyncio.sleep(1.0)
|
|
819
|
+
|
|
820
|
+
def _get_command_nonblocking(self):
    """Poll the command queue without blocking.

    Returns:
        The next pending command (a dict) if one is queued, otherwise None.

    An empty queue is the normal case and is handled silently via
    ``queue.Empty``; any other failure (e.g. a broken/closed queue after the
    manager process exits) is logged instead of being silently swallowed,
    but still yields None so the polling loop keeps running.
    """
    import queue  # multiprocessing.Queue.get_nowait() raises queue.Empty

    try:
        return self.command_queue.get_nowait()
    except queue.Empty:
        # Normal case: nothing pending right now.
        return None
    except Exception as exc:
        # Previously all exceptions were swallowed silently; surface the
        # unexpected ones so queue breakage is diagnosable.
        self.logger.warning(f"Command queue read failed: {exc}")
        return None
|
|
826
|
+
|
|
827
|
+
async def _process_command(self, command: Dict[str, Any]):
    """Dispatch a single manager command and report the outcome.

    Supported command types: ``add_camera``, ``remove_camera`` and
    ``update_camera`` (implemented as remove followed by add). Every
    command — including unrecognized ones — produces a response via
    :meth:`_send_response`, so the manager never waits on a dropped command.

    Args:
        command: Command dict with at least a ``type`` key; ``camera_config``
            and/or ``stream_key`` depending on the type.
    """
    cmd_type = command.get('type')
    self.logger.info(f"Processing command: {cmd_type}")

    try:
        if cmd_type == 'add_camera':
            # Guard against a missing config so we fail cleanly instead of
            # raising AttributeError on None.
            camera_config = command.get('camera_config') or {}
            success = await self._add_camera_internal(camera_config)
            self._send_response(cmd_type, camera_config.get('stream_key'), success)

        elif cmd_type == 'remove_camera':
            stream_key = command.get('stream_key')
            success = await self._remove_camera_internal(stream_key)
            self._send_response(cmd_type, stream_key, success)

        elif cmd_type == 'update_camera':
            # Update = tear down the old task, then start a fresh one.
            camera_config = command.get('camera_config') or {}
            stream_key = command.get('stream_key')
            await self._remove_camera_internal(stream_key)
            success = await self._add_camera_internal(camera_config)
            self._send_response(cmd_type, stream_key, success)

        else:
            # Previously unknown commands were silently dropped, leaving the
            # manager without any response; report failure explicitly.
            self.logger.warning(f"Unknown command type: {cmd_type}")
            self._send_response(
                cmd_type, command.get('stream_key'), False,
                f"unknown command type: {cmd_type}"
            )

    except Exception as exc:
        self.logger.error(f"Error processing {cmd_type}: {exc}")
        self._send_response(cmd_type, command.get('stream_key'), False, str(exc))
|
|
853
|
+
|
|
854
|
+
async def _add_camera_internal(self, camera_config: Dict[str, Any]) -> bool:
    """Start a camera-handler task for the given config; True on success."""
    stream_key = camera_config.get('stream_key')

    # Guard clauses: a key is mandatory, and duplicates are rejected.
    if not stream_key:
        return False
    if stream_key in self.camera_tasks:
        self.logger.warning(f"Camera {stream_key} already exists")
        return False

    try:
        self.camera_tasks[stream_key] = asyncio.create_task(
            self._camera_handler(camera_config),
            name=f"gst-camera-{stream_key}"
        )
        self.logger.info(f"Added GStreamer camera {stream_key}")
        return True

    except Exception as exc:
        self.logger.error(f"Failed to add camera {stream_key}: {exc}")
        return False
|
|
877
|
+
|
|
878
|
+
async def _remove_camera_internal(self, stream_key: str) -> bool:
    """Cancel a camera's task, tear down its pipeline; True on success."""
    task = self.camera_tasks.get(stream_key)
    if task is None:
        return False

    try:
        # Cancel the handler task and give it up to 5s to unwind.
        if not task.done():
            task.cancel()
            try:
                await asyncio.wait_for(task, timeout=5.0)
            except (asyncio.CancelledError, asyncio.TimeoutError):
                pass

        del self.camera_tasks[stream_key]

        # Release the GStreamer pipeline, if one was created for this stream.
        pipeline = self.pipelines.pop(stream_key, None)
        if pipeline is not None:
            pipeline.set_state(Gst.State.NULL)

        self.logger.info(f"Removed camera {stream_key}")
        return True

    except Exception as exc:
        self.logger.error(f"Error removing {stream_key}: {exc}")
        return False
|
|
904
|
+
|
|
905
|
+
def _send_response(self, cmd_type: str, stream_key: str, success: bool, error: str = None):
    """Push a command result back to the manager (best-effort, never raises)."""
    if not self.response_queue:
        return

    payload = {
        'worker_id': self.worker_id,
        'command_type': cmd_type,
        'stream_key': stream_key,
        'success': success,
        'error': error,
        'timestamp': time.time(),
    }
    try:
        self.response_queue.put_nowait(payload)
    except Exception as exc:
        # A full or broken queue must not take down the worker.
        self.logger.warning(f"Failed to send response: {exc}")
|
|
919
|
+
|
|
920
|
+
async def _shutdown(self):
    """Graceful shutdown: cancel tasks, stop pipelines, close Redis, report."""
    self.logger.info(f"Worker {self.worker_id}: Shutting down")

    # Request cancellation of every camera task, then wait for all of them.
    for task in self.camera_tasks.values():
        if not task.done():
            task.cancel()
    if self.camera_tasks:
        await asyncio.gather(*self.camera_tasks.values(), return_exceptions=True)

    # Tear down all GStreamer pipelines.
    for pipeline in list(self.pipelines.values()):
        pipeline.set_state(Gst.State.NULL)
    self.pipelines.clear()

    # Close the Redis connection last, after producers have stopped.
    if self.redis_client:
        await self.redis_client.close()

    self._report_health("stopped")
    self.logger.info(f"Worker {self.worker_id}: Shutdown complete")
|
|
943
|
+
|
|
944
|
+
def _report_health(self, status: str, active_cameras: int = 0, error: Optional[str] = None):
    """Publish a health snapshot to the manager's health queue (best-effort)."""
    try:
        # Resource probing may fail (e.g. process info unavailable);
        # fall back to zeros rather than skipping the report.
        cpu = mem = 0
        try:
            cpu = self._process_info.cpu_percent(interval=None)
            mem = self._process_info.memory_info().rss / 1024 / 1024
        except Exception:
            pass

        samples = self._encoding_times
        avg_encoding_ms = (sum(samples) / len(samples) * 1000) if samples else 0

        self.health_queue.put_nowait({
            'worker_id': self.worker_id,
            'status': status,
            'active_cameras': active_cameras,
            'timestamp': time.time(),
            'error': error,
            'worker_type': 'gstreamer',
            'encoder': self._detected_encoder,
            'metrics': {
                'cpu_percent': cpu,
                'memory_mb': mem,
                'frames_encoded': self._frames_encoded,
                'encoding_errors': self._encoding_errors,
                'avg_encoding_ms': avg_encoding_ms,
            },
        })

    except Exception as exc:
        self.logger.warning(f"Failed to report health: {exc}")
|
|
979
|
+
|
|
980
|
+
|
|
981
|
+
def run_gstreamer_worker(
    worker_id: int,
    camera_configs: List[Dict[str, Any]],
    stream_config: Dict[str, Any],
    stop_event: multiprocessing.Event,
    health_queue: multiprocessing.Queue,
    command_queue: multiprocessing.Queue = None,
    response_queue: multiprocessing.Queue = None,
    gstreamer_encoder: str = "auto",
    gstreamer_codec: str = "h264",
    gstreamer_preset: str = "low-latency",
    gpu_id: int = 0,
    platform: str = "auto",
    use_hardware_decode: bool = True,
    use_hardware_jpeg: bool = True,
    jetson_use_nvmm: bool = True,
    frame_optimizer_mode: str = "hash-only",
    fallback_on_error: bool = True,
    verbose_pipeline_logging: bool = False,
):
    """Process entry point: build a GStreamerAsyncWorker and run it to completion.

    Configures per-worker logging, constructs the worker from the supplied
    configuration, and drives its async ``run()`` loop via ``asyncio.run``.
    Any fatal error is logged with a traceback and re-raised so the parent
    process can observe the failure.

    Args:
        worker_id: Worker identifier.
        camera_configs: Camera configurations.
        stream_config: Streaming configuration.
        stop_event: Shutdown event.
        health_queue: Health reporting queue.
        command_queue: Command queue.
        response_queue: Response queue.
        gstreamer_encoder: Encoder type ("auto" selects at runtime).
        gstreamer_codec: Codec (e.g. "h264").
        gstreamer_preset: NVENC preset.
        gpu_id: GPU device ID.
        platform: Platform override ("auto" detects).
        use_hardware_decode: Enable hardware decode.
        use_hardware_jpeg: Enable hardware JPEG.
        jetson_use_nvmm: Use NVMM buffers on Jetson.
        frame_optimizer_mode: Frame optimization mode.
        fallback_on_error: Fallback to CPU on errors.
        verbose_pipeline_logging: Verbose pipeline logging.
    """
    logging.basicConfig(
        level=logging.INFO,
        format=f'%(asctime)s - GStreamerWorker-{worker_id} - %(levelname)s - %(message)s'
    )
    logger = logging.getLogger(f"GStreamerWorker-{worker_id}")
    logger.info(f"Starting GStreamer worker {worker_id}")

    try:
        asyncio.run(
            GStreamerAsyncWorker(
                worker_id=worker_id,
                camera_configs=camera_configs,
                stream_config=stream_config,
                stop_event=stop_event,
                health_queue=health_queue,
                command_queue=command_queue,
                response_queue=response_queue,
                gstreamer_encoder=gstreamer_encoder,
                gstreamer_codec=gstreamer_codec,
                gstreamer_preset=gstreamer_preset,
                gpu_id=gpu_id,
                platform=platform,
                use_hardware_decode=use_hardware_decode,
                use_hardware_jpeg=use_hardware_jpeg,
                jetson_use_nvmm=jetson_use_nvmm,
                frame_optimizer_mode=frame_optimizer_mode,
                fallback_on_error=fallback_on_error,
                verbose_pipeline_logging=verbose_pipeline_logging,
            ).run()
        )

    except Exception as exc:
        logger.error(f"Worker {worker_id} failed: {exc}", exc_info=True)
        raise
|
|
1058
|
+
|
|
1059
|
+
|
|
1060
|
+
def is_gstreamer_available() -> bool:
    """Check if GStreamer is available.

    Returns:
        The module-level ``GST_AVAILABLE`` flag — presumably set at import
        time based on whether the GStreamer Python bindings could be loaded
        (flag definition is outside this view; confirm at the top of the
        module).
    """
    return GST_AVAILABLE
|
|
1063
|
+
|