matrice-streaming 0.1.14__py3-none-any.whl → 0.1.65__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38) hide show
  1. matrice_streaming/__init__.py +44 -32
  2. matrice_streaming/streaming_gateway/camera_streamer/__init__.py +68 -1
  3. matrice_streaming/streaming_gateway/camera_streamer/async_camera_worker.py +1388 -0
  4. matrice_streaming/streaming_gateway/camera_streamer/async_ffmpeg_worker.py +966 -0
  5. matrice_streaming/streaming_gateway/camera_streamer/camera_streamer.py +188 -24
  6. matrice_streaming/streaming_gateway/camera_streamer/device_detection.py +507 -0
  7. matrice_streaming/streaming_gateway/camera_streamer/encoding_pool_manager.py +136 -0
  8. matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_camera_streamer.py +1048 -0
  9. matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_config.py +192 -0
  10. matrice_streaming/streaming_gateway/camera_streamer/ffmpeg_worker_manager.py +470 -0
  11. matrice_streaming/streaming_gateway/camera_streamer/gstreamer_camera_streamer.py +1368 -0
  12. matrice_streaming/streaming_gateway/camera_streamer/gstreamer_worker.py +1063 -0
  13. matrice_streaming/streaming_gateway/camera_streamer/gstreamer_worker_manager.py +546 -0
  14. matrice_streaming/streaming_gateway/camera_streamer/message_builder.py +60 -15
  15. matrice_streaming/streaming_gateway/camera_streamer/nvdec.py +1330 -0
  16. matrice_streaming/streaming_gateway/camera_streamer/nvdec_worker_manager.py +412 -0
  17. matrice_streaming/streaming_gateway/camera_streamer/platform_pipelines.py +680 -0
  18. matrice_streaming/streaming_gateway/camera_streamer/stream_statistics.py +111 -4
  19. matrice_streaming/streaming_gateway/camera_streamer/video_capture_manager.py +223 -27
  20. matrice_streaming/streaming_gateway/camera_streamer/worker_manager.py +694 -0
  21. matrice_streaming/streaming_gateway/debug/__init__.py +27 -2
  22. matrice_streaming/streaming_gateway/debug/benchmark.py +727 -0
  23. matrice_streaming/streaming_gateway/debug/debug_gstreamer_gateway.py +599 -0
  24. matrice_streaming/streaming_gateway/debug/debug_streaming_gateway.py +245 -95
  25. matrice_streaming/streaming_gateway/debug/debug_utils.py +29 -0
  26. matrice_streaming/streaming_gateway/debug/test_videoplayback.py +318 -0
  27. matrice_streaming/streaming_gateway/dynamic_camera_manager.py +656 -39
  28. matrice_streaming/streaming_gateway/metrics_reporter.py +676 -139
  29. matrice_streaming/streaming_gateway/streaming_action.py +71 -20
  30. matrice_streaming/streaming_gateway/streaming_gateway.py +1026 -78
  31. matrice_streaming/streaming_gateway/streaming_gateway_utils.py +175 -20
  32. matrice_streaming/streaming_gateway/streaming_status_listener.py +89 -0
  33. {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/METADATA +1 -1
  34. matrice_streaming-0.1.65.dist-info/RECORD +56 -0
  35. matrice_streaming-0.1.14.dist-info/RECORD +0 -38
  36. {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/WHEEL +0 -0
  37. {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/licenses/LICENSE.txt +0 -0
  38. {matrice_streaming-0.1.14.dist-info → matrice_streaming-0.1.65.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,507 @@
1
+ """Platform detection and capability discovery for GStreamer optimization.
2
+
3
+ This module provides hardware platform detection (Jetson, Desktop NVIDIA GPU, Intel/AMD GPU, CPU-only)
4
+ and GStreamer element capability checking to enable platform-specific pipeline optimization.
5
+ """
6
+ import logging
7
+ import os
8
+ import platform
9
+ import subprocess
10
+ import threading
11
+ from dataclasses import dataclass, field
12
+ from enum import Enum
13
+ from typing import Dict, List, Optional, Set
14
+
15
+ # GStreamer imports
16
+ try:
17
+ import gi
18
+ gi.require_version('Gst', '1.0')
19
+ from gi.repository import Gst
20
+ GST_AVAILABLE = True
21
+ except (ImportError, ValueError):
22
+ GST_AVAILABLE = False
23
+
24
+
25
class PlatformType(Enum):
    """Supported hardware platform types.

    The string values are used directly in log messages (see
    PlatformDetector.override_platform and PlatformInfo.__str__).
    """
    JETSON = "jetson"                          # NVIDIA Jetson (ARM + NVIDIA)
    DESKTOP_NVIDIA_GPU = "desktop-nvidia-gpu"  # Desktop/server NVIDIA GPU via nvidia-smi
    INTEL_GPU = "intel-gpu"                    # Intel GPU detected via vainfo (VAAPI)
    AMD_GPU = "amd-gpu"                        # AMD GPU detected via vainfo (VAAPI)
    CPU_ONLY = "cpu-only"                      # No GPU acceleration detected
32
+
33
+
34
@dataclass
class PlatformInfo:
    """Detected platform information and capabilities.

    Produced by PlatformDetector.detect(). The available_* sets contain
    GStreamer element names confirmed present on this machine; the
    recommended_* fields are chosen from them by platform type.
    """

    platform_type: PlatformType
    model: str  # e.g., "Jetson Xavier NX", "RTX 4090", "Intel UHD Graphics"
    architecture: str  # x86_64, aarch64, etc. (from platform.machine())

    # GStreamer element availability (element names, e.g. 'nvjpegenc')
    available_encoders: Set[str] = field(default_factory=set)
    available_decoders: Set[str] = field(default_factory=set)
    available_converters: Set[str] = field(default_factory=set)

    # Hardware capabilities
    supports_nvmm: bool = False  # NVIDIA Memory Model (Jetson)
    supports_cuda_memory: bool = False  # CUDA memory (Desktop NVIDIA)
    supports_vaapi: bool = False  # Video Acceleration API (Intel/AMD)
    gpu_count: int = 0

    # Recommended settings (defaults are the CPU fallbacks)
    recommended_encoder: str = "jpeg"
    recommended_decoder: str = "avdec_h264"
    recommended_converter: str = "videoconvert"
    max_workers: Optional[int] = None  # Platform-appropriate worker count; None = no hard limit

    def __str__(self) -> str:
        # One-line summary used in detection log messages.
        return (
            f"Platform: {self.platform_type.value}, Model: {self.model}, "
            f"Arch: {self.architecture}, GPUs: {self.gpu_count}"
        )
64
+
65
+
66
class PlatformDetector:
    """Singleton platform detector with caching.

    Detects the hardware platform (Jetson, desktop NVIDIA, Intel/AMD GPU,
    CPU-only) and GStreamer element availability, caching the result so
    multiple pipeline instances do not re-probe hardware.
    """

    _instance: Optional['PlatformDetector'] = None
    _lock = threading.RLock()  # guards singleton creation and the cached result
    _platform_info: Optional[PlatformInfo] = None  # cached detection result

    def __init__(self):
        """Private constructor - use get_instance() instead."""
        self.logger = logging.getLogger(__name__)

        # Capability checks require an initialized GStreamer runtime.
        if GST_AVAILABLE and not Gst.is_initialized():
            Gst.init(None)

    @classmethod
    def get_instance(cls) -> 'PlatformDetector':
        """Get the process-wide PlatformDetector (double-checked locking)."""
        if cls._instance is None:
            with cls._lock:
                if cls._instance is None:
                    cls._instance = cls()
        return cls._instance

    def detect(self, force_redetect: bool = False) -> PlatformInfo:
        """Detect platform and cache results.

        Args:
            force_redetect: Force re-detection even if cached result exists.

        Returns:
            PlatformInfo with detected capabilities.
        """
        with self._lock:
            if self._platform_info and not force_redetect:
                return self._platform_info

            self.logger.info("Detecting hardware platform...")

            platform_type, model = self._detect_platform_type()

            platform_info = PlatformInfo(
                platform_type=platform_type,
                model=model,
                architecture=platform.machine(),
            )

            # Fill derived fields: GPU count, element availability, defaults.
            platform_info.gpu_count = self._detect_gpu_count(platform_type)
            self._detect_gstreamer_capabilities(platform_info)
            self._set_recommended_settings(platform_info)

            # Cache and return
            self._platform_info = platform_info

            self.logger.info(f"Platform detected: {platform_info}")
            self.logger.info(
                f"Available encoders: {', '.join(sorted(platform_info.available_encoders))}"
            )

            return platform_info

    def override_platform(self, platform_type: PlatformType, model: str = "Manual Override"):
        """Manually override platform detection (for testing).

        Args:
            platform_type: Platform type to force.
            model: Optional model description.
        """
        with self._lock:
            self.logger.warning(f"Manually overriding platform to: {platform_type.value}")

            platform_info = PlatformInfo(
                platform_type=platform_type,
                model=model,
                architecture=platform.machine(),
            )

            # Keep derived fields consistent with detect(); gpu_count was
            # previously left at 0 on the override path.
            platform_info.gpu_count = self._detect_gpu_count(platform_type)
            self._detect_gstreamer_capabilities(platform_info)
            self._set_recommended_settings(platform_info)

            self._platform_info = platform_info

    def clear_cache(self):
        """Clear cached platform detection results."""
        with self._lock:
            self._platform_info = None
            self.logger.info("Cleared platform detection cache")

    def _detect_platform_type(self) -> tuple:
        """Detect platform type and model.

        Probe order matters: Jetson is ARM + NVIDIA, so it must be tested
        before the generic desktop-NVIDIA check.

        Returns:
            Tuple of (PlatformType, model_string).
        """
        if self._is_jetson():
            return (PlatformType.JETSON, self._get_jetson_model())

        if self._has_nvidia_gpu():
            return (PlatformType.DESKTOP_NVIDIA_GPU, self._get_nvidia_gpu_model())

        if self._has_intel_gpu():
            return (PlatformType.INTEL_GPU, self._get_intel_gpu_model())

        if self._has_amd_gpu():
            return (PlatformType.AMD_GPU, self._get_amd_gpu_model())

        # Fallback to CPU-only
        return (PlatformType.CPU_ONLY, f"{platform.processor() or 'Unknown CPU'}")

    def _run_probe(self, cmd: List[str]) -> Optional[str]:
        """Run a short-lived hardware probe command (shared by all probes).

        Args:
            cmd: Command and arguments, executed without a shell.

        Returns:
            Captured stdout when the binary exists and exits 0; None on a
            missing binary, timeout (2s), non-zero exit, or any other error.
        """
        try:
            result = subprocess.run(cmd, capture_output=True, text=True, timeout=2)
        except (FileNotFoundError, subprocess.TimeoutExpired):
            return None
        except Exception as exc:
            self.logger.debug(f"Probe {cmd[0]} failed: {exc}")
            return None
        if result.returncode != 0:
            return None
        return result.stdout

    def _is_jetson(self) -> bool:
        """Check if running on NVIDIA Jetson device."""
        try:
            # Device-tree model string is the definitive Jetson check.
            if os.path.exists('/proc/device-tree/model'):
                with open('/proc/device-tree/model', 'r') as f:
                    if 'jetson' in f.read().lower():
                        return True

            # Secondary check: ARM architecture plus a working nvidia-smi.
            if platform.machine() in ('aarch64', 'armv8', 'arm64'):
                if self._has_nvidia_gpu():
                    return True

        except Exception as e:
            self.logger.debug(f"Jetson detection error: {e}")

        return False

    def _get_jetson_model(self) -> str:
        """Get Jetson model name (device-tree strings are NUL-terminated)."""
        try:
            if os.path.exists('/proc/device-tree/model'):
                with open('/proc/device-tree/model', 'r') as f:
                    return f.read().strip().replace('\x00', '')
        except Exception:
            pass

        return "Jetson (Unknown Model)"

    def _has_nvidia_gpu(self) -> bool:
        """Check if NVIDIA GPU is available.

        Returns a real bool; the previous version returned the raw stdout
        string despite the -> bool annotation.
        """
        out = self._run_probe(['nvidia-smi', '--query-gpu=name', '--format=csv,noheader'])
        return bool(out and out.strip())

    def _get_nvidia_gpu_model(self) -> str:
        """Get NVIDIA GPU model name (first GPU listed by nvidia-smi)."""
        out = self._run_probe(['nvidia-smi', '--query-gpu=name', '--format=csv,noheader'])
        if out and out.strip():
            return out.strip().split('\n')[0]
        return "NVIDIA GPU (Unknown Model)"

    def _has_intel_gpu(self) -> bool:
        """Check if Intel GPU with VAAPI is available."""
        out = self._run_probe(['vainfo'])
        return bool(out and 'Intel' in out)

    def _get_intel_gpu_model(self) -> str:
        """Get Intel GPU model name by parsing vainfo output."""
        out = self._run_probe(['vainfo'])
        if out:
            for line in out.split('\n'):
                if 'Intel' in line and ('Graphics' in line or 'HD' in line or 'Iris' in line):
                    return line.strip()
        return "Intel GPU (Unknown Model)"

    def _has_amd_gpu(self) -> bool:
        """Check if AMD GPU with VAAPI is available."""
        out = self._run_probe(['vainfo'])
        return bool(out and 'AMD' in out)

    def _get_amd_gpu_model(self) -> str:
        """Get AMD GPU model name by parsing vainfo output."""
        out = self._run_probe(['vainfo'])
        if out:
            for line in out.split('\n'):
                if 'AMD' in line or 'Radeon' in line:
                    return line.strip()
        return "AMD GPU (Unknown Model)"

    def _detect_gpu_count(self, platform_type: PlatformType) -> int:
        """Detect number of available GPUs (NVIDIA: one per nvidia-smi line)."""
        if platform_type == PlatformType.CPU_ONLY:
            return 0

        if platform_type in (PlatformType.JETSON, PlatformType.DESKTOP_NVIDIA_GPU):
            out = self._run_probe(['nvidia-smi', '--query-gpu=name', '--format=csv,noheader'])
            if out:
                return len([line for line in out.strip().split('\n') if line])

        return 1  # Assume at least 1 GPU if detection succeeded

    def _detect_gstreamer_capabilities(self, platform_info: PlatformInfo):
        """Detect available GStreamer elements and set capability flags.

        Mutates platform_info in place: fills available_encoders/decoders/
        converters and the supports_nvmm / supports_cuda_memory /
        supports_vaapi flags.
        """
        if not GST_AVAILABLE:
            self.logger.warning("GStreamer not available, skipping capability detection")
            return

        # Element name -> platform(s) it is expected on (None = generic CPU
        # element). The mapping values are documentation only; every element
        # is probed regardless of the current platform.
        encoders_to_check = {
            # JPEG encoders
            'nvjpegenc': PlatformType.JETSON,  # Jetson hardware JPEG
            'vaapijpegenc': (PlatformType.INTEL_GPU, PlatformType.AMD_GPU),  # VAAPI JPEG
            'jpegenc': None,  # CPU JPEG (always available if gst-plugins-good installed)

            # H.264/H.265 encoders
            'nvh264enc': (PlatformType.DESKTOP_NVIDIA_GPU, PlatformType.JETSON),  # NVENC H.264
            'nvh265enc': (PlatformType.DESKTOP_NVIDIA_GPU, PlatformType.JETSON),  # NVENC H.265
            'nvv4l2h264enc': PlatformType.JETSON,  # Jetson V4L2 H.264
            'nvv4l2h265enc': PlatformType.JETSON,  # Jetson V4L2 H.265
            'vaapih264enc': (PlatformType.INTEL_GPU, PlatformType.AMD_GPU),  # VAAPI H.264
            'vaapih265enc': (PlatformType.INTEL_GPU, PlatformType.AMD_GPU),  # VAAPI H.265
            'x264enc': None,  # CPU H.264
            'openh264enc': None,  # OpenH264
        }

        decoders_to_check = {
            'nvv4l2decoder': PlatformType.JETSON,  # Jetson hardware decoder
            'nvdec': PlatformType.DESKTOP_NVIDIA_GPU,  # Desktop NVIDIA decoder
            'vaapih264dec': (PlatformType.INTEL_GPU, PlatformType.AMD_GPU),  # VAAPI decoder
            'avdec_h264': None,  # CPU decoder
        }

        converters_to_check = {
            'nvvidconv': PlatformType.JETSON,  # Jetson GPU converter
            'cudaconvert': (PlatformType.DESKTOP_NVIDIA_GPU, PlatformType.JETSON),  # CUDA converter
            'videoconvert': None,  # CPU converter
        }

        # Probe every candidate element (keys only; values are documentation).
        for encoder in encoders_to_check:
            if self._check_gstreamer_element(encoder):
                platform_info.available_encoders.add(encoder)

        for decoder in decoders_to_check:
            if self._check_gstreamer_element(decoder):
                platform_info.available_decoders.add(decoder)

        for converter in converters_to_check:
            if self._check_gstreamer_element(converter):
                platform_info.available_converters.add(converter)

        # Capability flags derived from platform type + element availability.
        platform_info.supports_nvmm = (
            platform_info.platform_type == PlatformType.JETSON and
            'nvvidconv' in platform_info.available_converters
        )

        platform_info.supports_cuda_memory = (
            platform_info.platform_type == PlatformType.DESKTOP_NVIDIA_GPU and
            'nvh264enc' in platform_info.available_encoders
        )

        platform_info.supports_vaapi = (
            platform_info.platform_type in (PlatformType.INTEL_GPU, PlatformType.AMD_GPU) and
            len(platform_info.available_encoders & {'vaapih264enc', 'vaapijpegenc'}) > 0
        )

    def _check_gstreamer_element(self, element_name: str) -> bool:
        """Check if a GStreamer element is available.

        Args:
            element_name: Name of GStreamer element to check.

        Returns:
            True if element is available, False otherwise.
        """
        if not GST_AVAILABLE:
            return False

        try:
            # Registry lookup first - cheaper than instantiating the element.
            if Gst.ElementFactory.find(element_name) is not None:
                return True

            # Fallback: attempt actual creation.
            return Gst.ElementFactory.make(element_name, None) is not None

        except Exception as e:
            self.logger.debug(f"Element {element_name} not available: {e}")
            return False

    def _set_recommended_settings(self, platform_info: PlatformInfo):
        """Set recommended encoder/decoder/converter and worker count.

        Prefers hardware elements when present, falling back to CPU
        equivalents ("jpeg", "avdec_h264", "videoconvert").
        """

        # Recommended JPEG encoder
        if platform_info.platform_type == PlatformType.JETSON:
            if 'nvjpegenc' in platform_info.available_encoders:
                platform_info.recommended_encoder = "nvjpegenc"
            else:
                platform_info.recommended_encoder = "jpeg"  # Fallback to CPU

        elif platform_info.platform_type in (PlatformType.INTEL_GPU, PlatformType.AMD_GPU):
            if 'vaapijpegenc' in platform_info.available_encoders:
                platform_info.recommended_encoder = "vaapijpegenc"
            else:
                platform_info.recommended_encoder = "jpeg"

        else:
            platform_info.recommended_encoder = "jpeg"  # CPU

        # Recommended decoder
        if platform_info.platform_type == PlatformType.JETSON:
            if 'nvv4l2decoder' in platform_info.available_decoders:
                platform_info.recommended_decoder = "nvv4l2decoder"
            else:
                platform_info.recommended_decoder = "avdec_h264"

        elif platform_info.platform_type == PlatformType.DESKTOP_NVIDIA_GPU:
            if 'nvdec' in platform_info.available_decoders:
                platform_info.recommended_decoder = "nvdec"
            else:
                platform_info.recommended_decoder = "avdec_h264"

        elif platform_info.platform_type in (PlatformType.INTEL_GPU, PlatformType.AMD_GPU):
            if 'vaapih264dec' in platform_info.available_decoders:
                platform_info.recommended_decoder = "vaapih264dec"
            else:
                platform_info.recommended_decoder = "avdec_h264"

        else:
            platform_info.recommended_decoder = "avdec_h264"

        # Recommended converter: GPU converters win regardless of platform.
        if 'nvvidconv' in platform_info.available_converters:
            platform_info.recommended_converter = "nvvidconv"
        elif 'cudaconvert' in platform_info.available_converters:
            platform_info.recommended_converter = "cudaconvert"
        else:
            platform_info.recommended_converter = "videoconvert"

        # Recommended max workers (platform-appropriate)
        if platform_info.platform_type == PlatformType.JETSON:
            # Jetson has fewer cores, limit workers
            platform_info.max_workers = 8
        elif platform_info.platform_type == PlatformType.CPU_ONLY:
            # CPU-only needs more workers for parallelism
            platform_info.max_workers = min(os.cpu_count() or 4, 16)
        else:
            # GPU-accelerated can handle more cameras per worker
            platform_info.max_workers = None  # No hard limit
495
+
496
+
497
def get_platform_info(force_redetect: bool = False) -> PlatformInfo:
    """Convenience wrapper around the PlatformDetector singleton.

    Args:
        force_redetect: When True, bypass the cached result and re-probe.

    Returns:
        PlatformInfo with detected capabilities.
    """
    return PlatformDetector.get_instance().detect(force_redetect=force_redetect)
@@ -0,0 +1,136 @@
1
+ """Encoding process pool manager for CPU-bound frame encoding operations.
2
+
3
+ This module manages a pool of worker processes dedicated to CPU-intensive
4
+ frame encoding tasks (JPEG compression, H265 encoding, etc.).
5
+ """
6
+ import logging
7
+ import multiprocessing
8
+ import os
9
+ from typing import Optional
10
+
11
+
12
class EncodingPoolManager:
    """Manages a process pool for parallel frame encoding.

    The encoding pool handles CPU-bound operations in separate processes
    to bypass the GIL and achieve true parallel execution on multi-core
    systems.
    """

    def __init__(self, num_workers: Optional[int] = None):
        """Initialize encoding pool manager.

        Args:
            num_workers: Number of encoding workers (default: CPU count - 2,
                but never fewer than 2).
        """
        if num_workers is None:
            # Reserve 2 cores for main process and I/O workers
            cpu_count = multiprocessing.cpu_count()
            num_workers = max(2, cpu_count - 2)

        self.num_workers = num_workers
        # NOTE: multiprocessing.Pool is a factory function, not a class; the
        # actual instance type lives in multiprocessing.pool (string
        # annotation avoids importing the submodule eagerly).
        self.pool: Optional["multiprocessing.pool.Pool"] = None
        self.logger = logging.getLogger(__name__)

        self.logger.info(
            f"Encoding pool manager initialized with {num_workers} workers "
            f"(CPU count: {multiprocessing.cpu_count()})"
        )

    def start(self):
        """Start the encoding process pool.

        Raises:
            Exception: Re-raises any pool-creation failure after logging it.
        """
        if self.pool is not None:
            self.logger.warning("Encoding pool already started")
            return

        try:
            # maxtasksperchild recycles workers to bound per-process memory
            # growth; _init_worker (module-level) sets up each child process.
            self.pool = multiprocessing.Pool(
                processes=self.num_workers,
                initializer=_init_worker,
                maxtasksperchild=1000  # Recycle workers after 1000 tasks
            )
            self.logger.info(f"Started encoding pool with {self.num_workers} workers")

        except Exception as exc:
            self.logger.error(f"Failed to start encoding pool: {exc}")
            raise

    def stop(self, timeout: float = 10.0):
        """Stop the encoding process pool gracefully.

        Args:
            timeout: Retained for interface compatibility; Pool.join() does
                not support a timeout, so graceful shutdown blocks until all
                outstanding tasks complete.
        """
        if self.pool is None:
            self.logger.warning("Encoding pool not running")
            return

        try:
            self.logger.info("Stopping encoding pool...")

            # Close pool (no more tasks accepted), then wait for workers.
            # BUG FIX: Pool.join() takes no arguments; the previous code
            # passed a timeout, which raised TypeError and caused every
            # shutdown to fall through to forceful termination.
            self.pool.close()
            self.pool.join()

            self.logger.info("Encoding pool stopped")

        except Exception as exc:
            self.logger.error(f"Error stopping encoding pool: {exc}")

            # Force terminate if graceful shutdown fails
            try:
                self.pool.terminate()
                self.pool.join()
                self.logger.warning("Encoding pool forcefully terminated")
            except Exception as term_exc:
                self.logger.error(f"Failed to terminate encoding pool: {term_exc}")

        finally:
            # Always drop the reference so the manager can be restarted.
            self.pool = None

    def get_pool(self) -> "multiprocessing.pool.Pool":
        """Get the encoding process pool.

        Returns:
            Process pool instance.

        Raises:
            RuntimeError: If pool is not started.
        """
        if self.pool is None:
            raise RuntimeError("Encoding pool not started. Call start() first.")
        return self.pool

    def is_running(self) -> bool:
        """Check if encoding pool is running.

        Returns:
            True if pool is active.
        """
        return self.pool is not None

    def __enter__(self):
        """Context manager entry: start the pool."""
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit: stop the pool."""
        self.stop()
+
122
+
123
def _init_worker():
    """One-time setup executed inside each encoding worker process.

    Intended as the pool's ``initializer`` hook; extend it to allocate
    per-worker resources if needed.
    """
    # Example: pin this worker to a specific core.
    # os.sched_setaffinity(0, {cpu_id})

    # Keep workers quiet: only warnings and above reach the log.
    worker_format = '%(asctime)s - EncodingWorker - %(levelname)s - %(message)s'
    logging.basicConfig(format=worker_format, level=logging.WARNING)