rocket-welder-sdk 1.0.5-py3-none-any.whl → 1.1.0-py3-none-any.whl
This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- rocket_welder_sdk/__init__.py +30 -4
- rocket_welder_sdk/bytes_size.py +234 -0
- rocket_welder_sdk/connection_string.py +232 -0
- rocket_welder_sdk/controllers.py +609 -0
- rocket_welder_sdk/gst_metadata.py +240 -0
- rocket_welder_sdk/py.typed +2 -0
- rocket_welder_sdk/rocket_welder_client.py +170 -0
- rocket_welder_sdk-1.1.0.dist-info/METADATA +496 -0
- rocket_welder_sdk-1.1.0.dist-info/RECORD +11 -0
- rocket_welder_sdk/client.py +0 -183
- rocket_welder_sdk/rocket_welder_sdk/__init__.py +0 -20
- rocket_welder_sdk/rocket_welder_sdk/client.py +0 -326
- rocket_welder_sdk/rocket_welder_sdk/connection_string.py +0 -190
- rocket_welder_sdk/rocket_welder_sdk/exceptions.py +0 -23
- rocket_welder_sdk/rocket_welder_sdk/gst_caps.py +0 -224
- rocket_welder_sdk/rocket_welder_sdk/gst_metadata.py +0 -43
- rocket_welder_sdk-1.0.5.dist-info/METADATA +0 -36
- rocket_welder_sdk-1.0.5.dist-info/RECORD +0 -12
- {rocket_welder_sdk-1.0.5.dist-info → rocket_welder_sdk-1.1.0.dist-info}/WHEEL +0 -0
- {rocket_welder_sdk-1.0.5.dist-info → rocket_welder_sdk-1.1.0.dist-info}/top_level.txt +0 -0
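The diff below adds a callback-driven controller API in rocket_welder_sdk/controllers.py. A minimal receive-only sketch against that API, assuming the 1.1.0 wheel is installed and that a ConnectionString describing an SHM buffer is built elsewhere (its parser lives in the new connection_string.py, which is not reproduced here); the helper name run_receive_loop is illustrative and not part of the SDK:

    import threading

    import numpy as np

    from rocket_welder_sdk.connection_string import ConnectionString
    from rocket_welder_sdk.controllers import OneWayShmController


    def run_receive_loop(connection: ConnectionString, stop_event: threading.Event) -> None:
        """Receive frames until stop_event is set (illustrative helper, not part of the SDK)."""

        def on_frame(frame: np.ndarray) -> None:
            # Each frame arrives as a numpy view over the shared-memory payload.
            print("frame:", frame.shape, frame.dtype)

        controller = OneWayShmController(connection)
        controller.start(on_frame, cancellation_token=stop_event)
        try:
            stop_event.wait()
        finally:
            controller.stop()

Note that, per the implementation below, start() returns immediately and frames are delivered on a worker thread named after the buffer, so the caller is responsible for keeping the process alive and for calling stop() on shutdown.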
rocket_welder_sdk/controllers.py
@@ -0,0 +1,609 @@
+"""
+Enterprise-grade controller implementations for RocketWelder SDK.
+Provides OneWay and Duplex shared memory controllers for video streaming.
+"""
+
+from __future__ import annotations
+
+import json
+import logging
+import threading
+from abc import ABC, abstractmethod
+from typing import Any, Callable
+
+import numpy as np
+from zerobuffer import BufferConfig, Frame, Reader, Writer
+from zerobuffer.duplex import DuplexChannelFactory, IImmutableDuplexServer
+
+from .connection_string import ConnectionMode, ConnectionString, Protocol
+from .gst_metadata import GstCaps, GstMetadata
+
+# Type alias for OpenCV Mat
+Mat = np.ndarray[Any, Any]
+
+
+class IController(ABC):
+    """Abstract base class for controllers."""
+
+    @property
+    @abstractmethod
+    def is_running(self) -> bool:
+        """Check if the controller is running."""
+        ...
+
+    @abstractmethod
+    def get_metadata(self) -> GstMetadata | None:
+        """Get the current GStreamer metadata."""
+        ...
+
+    @abstractmethod
+    def start(
+        self, on_frame: Callable[[Mat], None], cancellation_token: threading.Event | None = None
+    ) -> None:
+        """
+        Start the controller with a frame callback.
+
+        Args:
+            on_frame: Callback for processing frames
+            cancellation_token: Optional cancellation token
+        """
+        ...
+
+    @abstractmethod
+    def stop(self) -> None:
+        """Stop the controller."""
+        ...
+
+
+class OneWayShmController(IController):
+    """
+    One-way shared memory controller for receiving video frames.
+
+    This controller creates a shared memory buffer that GStreamer connects to
+    as a zerosink, allowing zero-copy frame reception.
+    """
+
+    def __init__(self, connection: ConnectionString, logger: logging.Logger | None = None):
+        """
+        Initialize the one-way controller.
+
+        Args:
+            connection: Connection string configuration
+            logger: Optional logger instance
+        """
+        if connection.protocol != Protocol.SHM:
+            raise ValueError(
+                f"OneWayShmController requires SHM protocol, got {connection.protocol}"
+            )
+
+        self._connection = connection
+        self._logger = logger or logging.getLogger(__name__)
+        self._reader_logger = logging.getLogger(f"{__name__}.Reader")
+        self._reader: Reader | None = None
+        self._gst_caps: GstCaps | None = None
+        self._metadata: GstMetadata | None = None
+        self._is_running = False
+        self._worker_thread: threading.Thread | None = None
+        self._cancellation_token: threading.Event | None = None
+
+    @property
+    def is_running(self) -> bool:
+        """Check if the controller is running."""
+        return self._is_running
+
+    def get_metadata(self) -> GstMetadata | None:
+        """Get the current GStreamer metadata."""
+        return self._metadata
+
+    def start(
+        self, on_frame: Callable[[Mat], None], cancellation_token: threading.Event | None = None
+    ) -> None:
+        """
+        Start receiving frames from shared memory.
+
+        Args:
+            on_frame: Callback for processing received frames
+            cancellation_token: Optional cancellation token
+        """
+        if self._is_running:
+            raise RuntimeError("Controller is already running")
+
+        self._logger.debug(
+            "Starting OneWayShmController for buffer '%s'", self._connection.buffer_name
+        )
+        self._is_running = True
+        self._cancellation_token = cancellation_token
+
+        # Create buffer configuration
+        config = BufferConfig(
+            metadata_size=int(self._connection.metadata_size),
+            payload_size=int(self._connection.buffer_size),
+        )
+
+        # Create reader (we are the server, GStreamer connects to us)
+        # Pass logger to Reader for better debugging
+        if not self._connection.buffer_name:
+            raise ValueError("Buffer name is required for shared memory connection")
+        self._reader = Reader(self._connection.buffer_name, config, logger=self._reader_logger)
+
+        self._logger.info(
+            "Created shared memory buffer '%s' with size %s and metadata %s",
+            self._connection.buffer_name,
+            self._connection.buffer_size,
+            self._connection.metadata_size,
+        )
+
+        # Start processing thread
+        self._worker_thread = threading.Thread(
+            target=self._process_frames,
+            args=(on_frame,),
+            name=f"RocketWelder-{self._connection.buffer_name}",
+        )
+        self._worker_thread.start()
+
+    def stop(self) -> None:
+        """Stop the controller and clean up resources."""
+        if not self._is_running:
+            return
+
+        self._logger.debug("Stopping controller for buffer '%s'", self._connection.buffer_name)
+        self._is_running = False
+
+        # Wait for worker thread to finish
+        if self._worker_thread and self._worker_thread.is_alive():
+            timeout_ms = self._connection.timeout_ms + 50
+            self._worker_thread.join(timeout=timeout_ms / 1000.0)
+
+        # Clean up reader
+        if self._reader:
+            self._reader.close()
+            self._reader = None
+
+        self._worker_thread = None
+        self._logger.info("Stopped controller for buffer '%s'", self._connection.buffer_name)
+
+    def _process_frames(self, on_frame: Callable[[Mat], None]) -> None:
+        """
+        Process frames from shared memory.
+
+        Args:
+            on_frame: Callback for processing frames
+        """
+        try:
+            # Process first frame to get metadata
+            self._on_first_frame(on_frame)
+
+            # Process remaining frames
+            while self._is_running and (
+                not self._cancellation_token or not self._cancellation_token.is_set()
+            ):
+                try:
+                    # ReadFrame blocks until frame available
+                    # Use timeout in seconds directly
+                    timeout_seconds = self._connection.timeout_ms / 1000.0
+                    frame = self._reader.read_frame(timeout=timeout_seconds)  # type: ignore[union-attr]
+
+                    if frame is None or not frame.is_valid:
+                        continue  # Skip invalid frames
+
+                    # Process frame data using context manager
+                    with frame:
+                        # Create Mat from frame data (zero-copy when possible)
+                        mat = self._create_mat_from_frame(frame)
+                        if mat is not None:
+                            on_frame(mat)
+
+                except Exception as e:
+                    # Log specific error types like C#
+                    error_type = type(e).__name__
+                    if "ReaderDead" in error_type or "WriterDead" in error_type:
+                        self._logger.info(
+                            "Writer disconnected from buffer '%s'", self._connection.buffer_name
+                        )
+                        self._is_running = False
+                        break
+                    elif "BufferFull" in error_type:
+                        self._logger.error(
+                            "Buffer full on '%s': %s", self._connection.buffer_name, e
+                        )
+                        if not self._is_running:
+                            break
+                    elif "FrameTooLarge" in error_type:
+                        self._logger.error(
+                            "Frame too large on '%s': %s", self._connection.buffer_name, e
+                        )
+                        if not self._is_running:
+                            break
+                    elif "ZeroBuffer" in error_type:
+                        self._logger.error(
+                            "ZeroBuffer error on '%s': %s", self._connection.buffer_name, e
+                        )
+                        if not self._is_running:
+                            break
+                    else:
+                        self._logger.error(
+                            "Unexpected error processing frame from buffer '%s': %s",
+                            self._connection.buffer_name,
+                            e,
+                        )
+                        if not self._is_running:
+                            break
+
+        except Exception as e:
+            self._logger.error("Fatal error in frame processing loop: %s", e)
+            self._is_running = False
+
+    def _on_first_frame(self, on_frame: Callable[[Mat], None]) -> None:
+        """
+        Process the first frame and extract metadata.
+        Matches C# OnFirstFrame behavior - loops until valid frame received.
+
+        Args:
+            on_frame: Callback for processing frames
+        """
+        while self._is_running and (
+            not self._cancellation_token or not self._cancellation_token.is_set()
+        ):
+            try:
+                # ReadFrame blocks until frame available
+                timeout_seconds = self._connection.timeout_ms / 1000.0
+                frame = self._reader.read_frame(timeout=timeout_seconds)  # type: ignore[union-attr]
+
+                if frame is None or not frame.is_valid:
+                    continue  # Skip invalid frames
+
+                with frame:
+                    # Read metadata - we ALWAYS expect metadata (like C#)
+                    metadata_bytes = self._reader.get_metadata()  # type: ignore[union-attr]
+                    if metadata_bytes:
+                        try:
+                            # Log raw metadata for debugging
+                            self._logger.debug(
+                                "Raw metadata: %d bytes, type=%s, first 100 bytes: %s",
+                                len(metadata_bytes),
+                                type(metadata_bytes),
+                                bytes(metadata_bytes[:min(100, len(metadata_bytes))]),
+                            )
+
+                            # Convert memoryview to bytes if needed
+                            if isinstance(metadata_bytes, memoryview):
+                                metadata_bytes = bytes(metadata_bytes)
+
+                            # Decode UTF-8
+                            metadata_str = metadata_bytes.decode("utf-8")
+
+                            # Check if metadata is empty or all zeros
+                            if not metadata_str or metadata_str == '\x00' * len(metadata_str):
+                                self._logger.warning("Metadata is empty or all zeros, skipping")
+                                continue
+
+                            # Find the start of JSON (skip any null bytes at the beginning)
+                            json_start = metadata_str.find('{')
+                            if json_start == -1:
+                                self._logger.warning("No JSON found in metadata: %r", metadata_str[:100])
+                                continue
+
+                            if json_start > 0:
+                                self._logger.debug("Skipping %d bytes before JSON", json_start)
+                                metadata_str = metadata_str[json_start:]
+
+                            # Find the end of JSON (handle null padding)
+                            json_end = metadata_str.rfind('}')
+                            if json_end != -1 and json_end < len(metadata_str) - 1:
+                                metadata_str = metadata_str[:json_end + 1]
+
+                            metadata_json = json.loads(metadata_str)
+                            self._metadata = GstMetadata.from_json(metadata_json)
+                            self._gst_caps = self._metadata.caps
+                            self._logger.info(
+                                "Received metadata from buffer '%s': %s",
+                                self._connection.buffer_name,
+                                self._gst_caps,
+                            )
+                        except Exception as e:
+                            self._logger.error("Failed to parse metadata: %s", e)
+                            # Log the actual metadata content for debugging
+                            if metadata_bytes:
+                                self._logger.debug("Metadata content: %r", metadata_bytes[:200])
+                            # Don't continue without metadata
+                            continue
+
+                    # Process first frame
+                    mat = self._create_mat_from_frame(frame)
+                    if mat is not None:
+                        on_frame(mat)
+                    return  # Successfully processed first frame
+
+            except Exception as e:
+                error_type = type(e).__name__
+                if "ReaderDead" in error_type or "WriterDead" in error_type:
+                    self._is_running = False
+                    self._logger.info(
+                        "Writer disconnected while waiting for first frame on buffer '%s'",
+                        self._connection.buffer_name,
+                    )
+                    raise
+                else:
+                    self._logger.error(
+                        "Error waiting for first frame on buffer '%s': %s",
+                        self._connection.buffer_name,
+                        e,
+                    )
+                    if not self._is_running:
+                        break
+
+    def _create_mat_from_frame(self, frame: Frame) -> Mat | None:
+        """
+        Create OpenCV Mat from frame data using GstCaps.
+        Matches C# CreateMat behavior - creates Mat wrapping the data.
+
+        Args:
+            frame: ZeroBuffer frame
+
+        Returns:
+            OpenCV Mat or None if conversion failed
+        """
+        try:
+            # Match C# CreateMat behavior: Create Mat wrapping the existing data
+            if self._gst_caps and self._gst_caps.width and self._gst_caps.height:
+                width = self._gst_caps.width
+                height = self._gst_caps.height
+
+                # Determine channels from format (like C# MapGStreamerFormatToEmgu)
+                format_str = self._gst_caps.format or "RGB"
+                if format_str in ["RGB", "BGR"]:
+                    channels = 3
+                elif format_str in ["RGBA", "BGRA", "ARGB", "ABGR"]:
+                    channels = 4
+                elif format_str in ["GRAY8", "GRAY16_LE", "GRAY16_BE"]:
+                    channels = 1
+                else:
+                    channels = 3  # Default to RGB
+
+                # Get frame data directly as numpy array (zero-copy view)
+                # Frame.data is already a memoryview/buffer that can be wrapped
+                data = np.frombuffer(frame.data, dtype=np.uint8)
+
+                # Check data size matches expected
+                expected_size = height * width * channels
+                if len(data) != expected_size:
+                    self._logger.error(
+                        "Data size mismatch. Expected %d bytes for %dx%d with %d channels, got %d",
+                        expected_size,
+                        width,
+                        height,
+                        channels,
+                        len(data),
+                    )
+                    return None
+
+                # Reshape to image dimensions - this is zero-copy, just changes the view
+                # This matches C#: new Mat(Height, Width, Depth, Channels, ptr, Width * Channels)
+                if channels == 3:
+                    mat = data.reshape((height, width, 3))
+                elif channels == 1:
+                    mat = data.reshape((height, width))
+                elif channels == 4:
+                    mat = data.reshape((height, width, 4))
+                else:
+                    self._logger.error("Unsupported channel count: %d", channels)
+                    return None
+
+                return mat  # type: ignore[no-any-return]
+
+            # No caps available
+            self._logger.error("No GstCaps available for frame conversion")
+            return None
+
+        except Exception as e:
+            self._logger.error("Failed to convert frame to Mat: %s", e)
+            return None
+
+    def _infer_caps_from_frame(self, mat: Mat) -> None:
+        """
+        Infer GStreamer caps from OpenCV Mat.
+
+        Args:
+            mat: OpenCV Mat
+        """
+        if mat is None:
+            return
+
+        shape = mat.shape
+        if len(shape) == 2:
+            # Grayscale
+            self._gst_caps = GstCaps(format="GRAY8", width=shape[1], height=shape[0])
+        elif len(shape) == 3:
+            # Color image
+            self._gst_caps = GstCaps(format="BGR", width=shape[1], height=shape[0])
+
+        self._logger.info("Inferred caps from frame: %s", self._gst_caps)
+
+
+class DuplexShmController(IController):
+    """
+    Duplex shared memory controller for bidirectional video streaming.
+
+    This controller supports both receiving frames from one buffer and
+    sending processed frames to another buffer.
+    """
+
+    def __init__(self, connection: ConnectionString, logger: logging.Logger | None = None):
+        """
+        Initialize the duplex controller.
+
+        Args:
+            connection: Connection string configuration
+            logger: Optional logger instance
+        """
+        if connection.protocol != Protocol.SHM:
+            raise ValueError(
+                f"DuplexShmController requires SHM protocol, got {connection.protocol}"
+            )
+
+        if connection.connection_mode != ConnectionMode.DUPLEX:
+            raise ValueError(
+                f"DuplexShmController requires DUPLEX mode, got {connection.connection_mode}"
+            )
+
+        self._connection = connection
+        self._logger = logger or logging.getLogger(__name__)
+        self._duplex_server: IImmutableDuplexServer | None = None
+        self._gst_caps: GstCaps | None = None
+        self._metadata: GstMetadata | None = None
+        self._is_running = False
+        self._on_frame_callback: Callable[[Mat, Mat], None] | None = None
+        self._frame_count = 0
+
+    @property
+    def is_running(self) -> bool:
+        """Check if the controller is running."""
+        return self._is_running
+
+    def get_metadata(self) -> GstMetadata | None:
+        """Get the current GStreamer metadata."""
+        return self._metadata
+
+    def start(
+        self,
+        on_frame: Callable[[Mat, Mat], None],  # type: ignore[override]
+        cancellation_token: threading.Event | None = None,
+    ) -> None:
+        """
+        Start duplex frame processing.
+
+        Args:
+            on_frame: Callback that receives input frame and output frame to fill
+            cancellation_token: Optional cancellation token
+        """
+        if self._is_running:
+            raise RuntimeError("Controller is already running")
+
+        self._is_running = True
+        self._on_frame_callback = on_frame
+
+        # Create buffer configuration
+        config = BufferConfig(
+            metadata_size=int(self._connection.metadata_size),
+            payload_size=int(self._connection.buffer_size),
+        )
+
+        # Create duplex server using factory
+        # Convert timeout from milliseconds to seconds for Python API
+        if not self._connection.buffer_name:
+            raise ValueError("Buffer name is required for shared memory connection")
+        timeout_seconds = self._connection.timeout_ms / 1000.0
+        factory = DuplexChannelFactory()
+        self._duplex_server = factory.create_immutable_server(
+            self._connection.buffer_name, config, timeout_seconds
+        )
+
+        self._logger.info(
+            "Starting duplex server for channel '%s' with size %s and metadata %s",
+            self._connection.buffer_name,
+            self._connection.buffer_size,
+            self._connection.metadata_size,
+        )
+
+        # Start server with frame processor callback
+        if self._duplex_server:
+            self._duplex_server.start(self._process_duplex_frame, self._on_metadata)
+
+    def stop(self) -> None:
+        """Stop the controller and clean up resources."""
+        if not self._is_running:
+            return
+
+        self._logger.info("Stopping DuplexShmController")
+        self._is_running = False
+
+        # Stop the duplex server
+        if self._duplex_server:
+            self._duplex_server.stop()
+            self._duplex_server = None
+
+        self._logger.info("DuplexShmController stopped")
+
+    def _on_metadata(self, metadata_bytes: bytes | memoryview) -> None:
+        """
+        Handle metadata from duplex channel.
+
+        Args:
+            metadata_bytes: Raw metadata bytes or memoryview
+        """
+        try:
+            # Convert memoryview to bytes if needed
+            if isinstance(metadata_bytes, memoryview):
+                metadata_bytes = bytes(metadata_bytes)
+
+            metadata_str = metadata_bytes.decode("utf-8")
+            metadata_json = json.loads(metadata_str)
+            self._metadata = GstMetadata.from_json(metadata_json)
+            self._gst_caps = self._metadata.caps
+            self._logger.info("Received metadata: %s", self._metadata)
+        except Exception as e:
+            self._logger.warning("Failed to parse metadata: %s", e)
+
+    def _process_duplex_frame(self, request_frame: Frame, response_writer: Writer) -> None:
+        """
+        Process a frame in duplex mode.
+
+        Args:
+            request_frame: Input frame from the request
+            response_writer: Writer for the response frame
+        """
+        try:
+            if not self._on_frame_callback:
+                return
+
+            self._frame_count += 1
+
+            # Convert input frame to Mat
+            input_mat = self._frame_to_mat(request_frame)
+            if input_mat is None:
+                return
+
+            # Get buffer for output frame - use context manager for RAII
+            with response_writer.get_frame_buffer(request_frame.size) as output_buffer:
+                # Create output Mat from buffer (zero-copy)
+                if self._gst_caps:
+                    height = self._gst_caps.height or 480
+                    width = self._gst_caps.width or 640
+
+                    if self._gst_caps.format == "RGB" or self._gst_caps.format == "BGR":
+                        output_mat = np.frombuffer(output_buffer, dtype=np.uint8).reshape(
+                            (height, width, 3)
+                        )
+                    elif self._gst_caps.format == "GRAY8":
+                        output_mat = np.frombuffer(output_buffer, dtype=np.uint8).reshape(
+                            (height, width)
+                        )
+                    else:
+                        # Default to same shape as input
+                        output_mat = np.frombuffer(output_buffer, dtype=np.uint8).reshape(
+                            input_mat.shape
+                        )
+                else:
+                    # Use same shape as input
+                    output_mat = np.frombuffer(output_buffer, dtype=np.uint8).reshape(input_mat.shape)
+
+                # Call user's processing function
+                self._on_frame_callback(input_mat, output_mat)
+
+            # Commit the response frame after buffer is released
+            response_writer.commit_frame()
+
+            self._logger.debug(
+                "Processed duplex frame %d (%dx%d)",
+                self._frame_count,
+                input_mat.shape[1],
+                input_mat.shape[0],
+            )
+
+        except Exception as e:
+            self._logger.error("Error processing duplex frame: %s", e)
+
+    def _frame_to_mat(self, frame: Frame) -> Mat | None:
+        """Convert frame to OpenCV Mat (reuse from OneWayShmController)."""
+        # Implementation is same as OneWayShmController
+        return OneWayShmController._create_mat_from_frame(self, frame)  # type: ignore[arg-type]
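For DuplexShmController above, the frame callback has the two-argument shape Callable[[Mat, Mat], None]: the second argument is a numpy view over the response buffer, so results must be written into it in place before the controller commits the response frame. A minimal sketch, assuming input and output share the same caps (geometry and format); invert_frame and run_duplex are illustrative names, not part of the SDK:

    import numpy as np

    from rocket_welder_sdk.connection_string import ConnectionString
    from rocket_welder_sdk.controllers import DuplexShmController


    def invert_frame(input_mat: np.ndarray, output_mat: np.ndarray) -> None:
        # Write the processed image into the response buffer in place;
        # a returned array would be ignored by the controller.
        np.copyto(output_mat, 255 - input_mat)


    def run_duplex(connection: ConnectionString) -> None:
        # connection must use the SHM protocol in DUPLEX mode (see __init__ checks above).
        controller = DuplexShmController(connection)
        controller.start(invert_frame)
        # ... keep the process alive; call controller.stop() on shutdown.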